methodwebscan-0.0.17-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. methodwebscan/__init__.py +71 -0
  2. methodwebscan/core/__init__.py +25 -0
  3. methodwebscan/core/datetime_utils.py +28 -0
  4. methodwebscan/core/pydantic_utilities.py +249 -0
  5. methodwebscan/core/serialization.py +254 -0
  6. methodwebscan/py.typed +0 -0
  7. methodwebscan/resources/__init__.py +54 -0
  8. methodwebscan/resources/common/__init__.py +5 -0
  9. methodwebscan/resources/common/tls_version.py +7 -0
  10. methodwebscan/resources/fingerprint/__init__.py +10 -0
  11. methodwebscan/resources/fingerprint/certificate.py +39 -0
  12. methodwebscan/resources/fingerprint/fingerprint_report.py +31 -0
  13. methodwebscan/resources/fingerprint/http_headers.py +33 -0
  14. methodwebscan/resources/fingerprint/public_key_algorithm.py +5 -0
  15. methodwebscan/resources/fingerprint/signature_algorithm.py +25 -0
  16. methodwebscan/resources/fingerprint/tls_info.py +23 -0
  17. methodwebscan/resources/fuzzpath/__init__.py +6 -0
  18. methodwebscan/resources/fuzzpath/fuzz_path_report.py +25 -0
  19. methodwebscan/resources/fuzzpath/url_details.py +19 -0
  20. methodwebscan/resources/graphql/__init__.py +10 -0
  21. methodwebscan/resources/graphql/graph_ql_data.py +20 -0
  22. methodwebscan/resources/graphql/graph_ql_field.py +17 -0
  23. methodwebscan/resources/graphql/graph_ql_query.py +18 -0
  24. methodwebscan/resources/graphql/graph_ql_schema.py +18 -0
  25. methodwebscan/resources/graphql/graph_ql_schema_data.py +18 -0
  26. methodwebscan/resources/graphql/graph_ql_type.py +21 -0
  27. methodwebscan/resources/routes/__init__.py +23 -0
  28. methodwebscan/resources/routes/api_type.py +5 -0
  29. methodwebscan/resources/routes/o_auth_flow.py +22 -0
  30. methodwebscan/resources/routes/o_auth_flows.py +27 -0
  31. methodwebscan/resources/routes/route.py +31 -0
  32. methodwebscan/resources/routes/routes_report.py +37 -0
  33. methodwebscan/resources/routes/security_requirement.py +17 -0
  34. methodwebscan/resources/routes/security_scheme.py +35 -0
  35. methodwebscan/resources/routes/security_scheme_name.py +3 -0
  36. methodwebscan/resources/routes/security_scheme_type.py +7 -0
  37. methodwebscan/resources/webpagecapture/__init__.py +6 -0
  38. methodwebscan/resources/webpagecapture/webpage_capture_report.py +19 -0
  39. methodwebscan/resources/webpagecapture/webpage_screenshot_report.py +19 -0
  40. methodwebscan-0.0.17.dist-info/METADATA +25 -0
  41. methodwebscan-0.0.17.dist-info/RECORD +42 -0
  42. methodwebscan-0.0.17.dist-info/WHEEL +4 -0
@@ -0,0 +1,71 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .resources import (
4
+ ApiType,
5
+ Certificate,
6
+ FingerprintReport,
7
+ FuzzPathReport,
8
+ GraphQlData,
9
+ GraphQlField,
10
+ GraphQlQuery,
11
+ GraphQlSchema,
12
+ GraphQlSchemaData,
13
+ GraphQlType,
14
+ HttpHeaders,
15
+ OAuthFlow,
16
+ OAuthFlows,
17
+ PublicKeyAlgorithm,
18
+ Route,
19
+ RoutesReport,
20
+ SecurityRequirement,
21
+ SecurityScheme,
22
+ SecuritySchemeName,
23
+ SecuritySchemeType,
24
+ SignatureAlgorithm,
25
+ TlsInfo,
26
+ TlsVersion,
27
+ UrlDetails,
28
+ WebpageCaptureReport,
29
+ WebpageScreenshotReport,
30
+ common,
31
+ fingerprint,
32
+ fuzzpath,
33
+ graphql,
34
+ routes,
35
+ webpagecapture,
36
+ )
37
+
38
+ __all__ = [
39
+ "ApiType",
40
+ "Certificate",
41
+ "FingerprintReport",
42
+ "FuzzPathReport",
43
+ "GraphQlData",
44
+ "GraphQlField",
45
+ "GraphQlQuery",
46
+ "GraphQlSchema",
47
+ "GraphQlSchemaData",
48
+ "GraphQlType",
49
+ "HttpHeaders",
50
+ "OAuthFlow",
51
+ "OAuthFlows",
52
+ "PublicKeyAlgorithm",
53
+ "Route",
54
+ "RoutesReport",
55
+ "SecurityRequirement",
56
+ "SecurityScheme",
57
+ "SecuritySchemeName",
58
+ "SecuritySchemeType",
59
+ "SignatureAlgorithm",
60
+ "TlsInfo",
61
+ "TlsVersion",
62
+ "UrlDetails",
63
+ "WebpageCaptureReport",
64
+ "WebpageScreenshotReport",
65
+ "common",
66
+ "fingerprint",
67
+ "fuzzpath",
68
+ "graphql",
69
+ "routes",
70
+ "webpagecapture",
71
+ ]
@@ -0,0 +1,25 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .datetime_utils import serialize_datetime
4
+ from .pydantic_utilities import (
5
+ IS_PYDANTIC_V2,
6
+ UniversalBaseModel,
7
+ UniversalRootModel,
8
+ parse_obj_as,
9
+ universal_field_validator,
10
+ universal_root_validator,
11
+ update_forward_refs,
12
+ )
13
+ from .serialization import FieldMetadata
14
+
15
+ __all__ = [
16
+ "FieldMetadata",
17
+ "IS_PYDANTIC_V2",
18
+ "UniversalBaseModel",
19
+ "UniversalRootModel",
20
+ "parse_obj_as",
21
+ "serialize_datetime",
22
+ "universal_field_validator",
23
+ "universal_root_validator",
24
+ "update_forward_refs",
25
+ ]
@@ -0,0 +1,28 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import datetime as dt
4
+
5
+
6
def serialize_datetime(v: dt.datetime) -> str:
    """
    Serialize a datetime including timezone info.

    Uses the timezone info provided if present, otherwise uses the current runtime's timezone info.

    UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00.
    """
    # Naive datetimes are stamped with the runtime's local timezone first.
    if v.tzinfo is None:
        runtime_tz = dt.datetime.now().astimezone().tzinfo
        v = v.replace(tzinfo=runtime_tz)

    # UTC is a special case: emit a trailing "Z" instead of "+00:00".
    is_utc = v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None)
    rendered = v.isoformat()
    return rendered.replace("+00:00", "Z") if is_utc else rendered
@@ -0,0 +1,249 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ # nopycln: file
4
+ import datetime as dt
5
+ import typing
6
+ from collections import defaultdict
7
+
8
+ import typing_extensions
9
+
10
+ import pydantic
11
+
12
+ from .datetime_utils import serialize_datetime
13
+ from .serialization import convert_and_respect_annotation_metadata
14
+
15
+ IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
16
+
17
+ if IS_PYDANTIC_V2:
18
+ # isort will try to reformat the comments on these imports, which breaks mypy
19
+ # isort: off
20
+ from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
21
+ parse_date as parse_date,
22
+ )
23
+ from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2
24
+ parse_datetime as parse_datetime,
25
+ )
26
+ from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
27
+ ENCODERS_BY_TYPE as encoders_by_type,
28
+ )
29
+ from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
30
+ get_args as get_args,
31
+ )
32
+ from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
33
+ get_origin as get_origin,
34
+ )
35
+ from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
36
+ is_literal_type as is_literal_type,
37
+ )
38
+ from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
39
+ is_union as is_union,
40
+ )
41
+ from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
42
+ else:
43
+ from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1
44
+ from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1
45
+ from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1
46
+ from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1
47
+ from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1
48
+ from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1
49
+ from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1
50
+ from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1
51
+
52
+ # isort: on
53
+
54
+
55
+ T = typing.TypeVar("T")
56
+ Model = typing.TypeVar("Model", bound=pydantic.BaseModel)
57
+
58
+
59
def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
    """Validate ``object_`` against ``type_``, honoring FieldMetadata aliases.

    Aliased keys are first rewritten to their field names, then validation is
    delegated to whichever pydantic major version is installed.
    """
    dealiased = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
    if not IS_PYDANTIC_V2:
        return pydantic.parse_obj_as(type_, dealiased)
    # Pydantic v2 replaced parse_obj_as with TypeAdapter.validate_python.
    return pydantic.TypeAdapter(type_).validate_python(dealiased)  # type: ignore # Pydantic v2
66
+
67
+
68
def to_jsonable_with_fallback(
    obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any]
) -> typing.Any:
    """Convert ``obj`` into a JSON-able value.

    On pydantic v2 this delegates to ``pydantic_core.to_jsonable_python`` with
    the given fallback; on v1 the fallback serializer is applied directly.
    """
    if not IS_PYDANTIC_V2:
        return fallback_serializer(obj)
    from pydantic_core import to_jsonable_python

    return to_jsonable_python(obj, fallback=fallback_serializer)
77
+
78
+
79
class UniversalBaseModel(pydantic.BaseModel):
    """Base model providing consistent behavior on pydantic v1 and v2.

    Serialization defaults to ``by_alias=True`` / ``exclude_unset=True``, while
    still emitting unset fields whose declared defaults are non-None (see
    ``dict``). Datetimes are serialized via ``serialize_datetime``.
    """

    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            # Allow field names that start with "model_" in generated models.
            protected_namespaces=(),
            json_encoders={dt.datetime: serialize_datetime},
        )  # type: ignore # Pydantic v2
    else:

        class Config:
            smart_union = True
            json_encoders = {dt.datetime: serialize_datetime}

    def json(self, **kwargs: typing.Any) -> str:
        """Serialize to JSON with ``by_alias``/``exclude_unset`` defaults applied."""
        kwargs_with_defaults: typing.Any = {
            "by_alias": True,
            "exclude_unset": True,
            **kwargs,
        }
        if IS_PYDANTIC_V2:
            return super().model_dump_json(**kwargs_with_defaults)  # type: ignore # Pydantic v2
        else:
            return super().json(**kwargs_with_defaults)

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        """
        Override the default dict method to `exclude_unset` by default. This function patches
        `exclude_unset` to work include fields within non-None default values.
        """
        # Note: the logic here is multi-plexed given the levers exposed in Pydantic V1 vs V2
        # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice.
        #
        # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
        # that we have less control over, and this is less intrusive than custom serializers for now.
        if IS_PYDANTIC_V2:
            kwargs_with_defaults_exclude_unset: typing.Any = {
                **kwargs,
                "by_alias": True,
                "exclude_unset": True,
                "exclude_none": False,
            }
            kwargs_with_defaults_exclude_none: typing.Any = {
                **kwargs,
                "by_alias": True,
                "exclude_none": True,
                "exclude_unset": False,
            }
            dict_dump = deep_union_pydantic_dicts(
                super().model_dump(**kwargs_with_defaults_exclude_unset),  # type: ignore # Pydantic v2
                super().model_dump(**kwargs_with_defaults_exclude_none),  # type: ignore # Pydantic v2
            )

        else:
            # Copy so we never mutate the model's own __fields_set__: adding
            # names to the live set would permanently change how later
            # .dict()/.json() calls interpret exclude_unset.
            _fields_set = set(self.__fields_set__)

            fields = _get_model_fields(self.__class__)
            for name, field in fields.items():
                if name not in _fields_set:
                    default = _get_field_default(field)

                    # If the default values are non-null act like they've been set
                    # This effectively allows exclude_unset to work like exclude_none where
                    # the latter passes through intentionally set none values.
                    # Identity check: `!= None` would invoke an arbitrary
                    # __eq__ on the default value.
                    if default is not None:
                        _fields_set.add(name)

            kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
                "by_alias": True,
                "exclude_unset": True,
                "include": _fields_set,
                **kwargs,
            }

            dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)

        return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")
154
+
155
+
156
def deep_union_pydantic_dicts(
    source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
    """Recursively merge ``source`` into ``destination`` in place and return it.

    Nested dicts are merged key-by-key; any non-dict value from ``source``
    overwrites the corresponding entry in ``destination``.
    """
    for key in source:
        value = source[key]
        if not isinstance(value, dict):
            destination[key] = value
            continue
        # Descend into (or create) the matching sub-dict and merge there.
        deep_union_pydantic_dicts(value, destination.setdefault(key, {}))
    return destination
167
+
168
+
169
# Root models differ across pydantic majors: v2 ships pydantic.RootModel,
# while v1 wraps the payload in a __root__ field on a plain BaseModel.
if IS_PYDANTIC_V2:

    class V2RootModel(UniversalBaseModel, pydantic.RootModel):  # type: ignore # Pydantic v2
        pass

    # Single alias so generated code can subclass one name on either version.
    UniversalRootModel: typing_extensions.TypeAlias = V2RootModel  # type: ignore
else:
    UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel  # type: ignore
177
+
178
+
179
def encode_by_type(o: typing.Any) -> typing.Any:
    """Encode ``o`` with pydantic's JSON encoder table.

    An exact type match wins; otherwise the first encoder whose registered
    types ``o`` is an instance of is used. Returns None when nothing applies.
    """
    exact_encoder = encoders_by_type.get(type(o))
    if exact_encoder is not None:
        return exact_encoder(o)

    # Group registered types by their encoder so isinstance can test a tuple.
    by_encoder: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = defaultdict(
        tuple
    )
    for registered_type, encoder in encoders_by_type.items():
        by_encoder[encoder] += (registered_type,)

    for encoder, classes_tuple in by_encoder.items():
        if isinstance(o, classes_tuple):
            return encoder(o)
191
+
192
+
193
def update_forward_refs(model: typing.Type["Model"]) -> None:
    """Resolve forward references on ``model`` under either pydantic version."""
    if not IS_PYDANTIC_V2:
        model.update_forward_refs()
        return
    # v2: rebuild quietly; unresolved refs are tolerated rather than raised.
    model.model_rebuild(raise_errors=False)  # type: ignore # Pydantic v2
198
+
199
+
200
# Mirrors Pydantic's internal typing
AnyCallable = typing.Callable[..., typing.Any]


def universal_root_validator(
    pre: bool = False,
) -> typing.Callable[[AnyCallable], AnyCallable]:
    """Version-agnostic whole-model validator decorator.

    Wraps ``pydantic.root_validator`` (v1) or ``pydantic.model_validator``
    (v2); ``pre=True`` maps to v2's "before" mode.
    """

    def decorator(func: AnyCallable) -> AnyCallable:
        if not IS_PYDANTIC_V2:
            return pydantic.root_validator(pre=pre)(func)  # type: ignore # Pydantic v1
        mode = "before" if pre else "after"
        return pydantic.model_validator(mode=mode)(func)  # type: ignore # Pydantic v2

    return decorator
214
+
215
+
216
def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]:
    """Version-agnostic single-field validator decorator.

    Wraps ``pydantic.validator`` (v1) or ``pydantic.field_validator`` (v2);
    ``pre=True`` maps to v2's "before" mode.
    """

    def decorator(func: AnyCallable) -> AnyCallable:
        if not IS_PYDANTIC_V2:
            return pydantic.validator(field_name, pre=pre)(func)  # type: ignore # Pydantic v1
        mode = "before" if pre else "after"
        return pydantic.field_validator(field_name, mode=mode)(func)  # type: ignore # Pydantic v2

    return decorator
224
+
225
+
226
# A field descriptor under either pydantic major version.
PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo]


def _get_model_fields(
    model: typing.Type["Model"],
) -> typing.Mapping[str, PydanticField]:
    """Return ``model``'s name -> field mapping for the installed pydantic."""
    if not IS_PYDANTIC_V2:
        return model.__fields__  # type: ignore # Pydantic v1
    return model.model_fields  # type: ignore # Pydantic v2
236
+
237
+
238
def _get_field_default(field: PydanticField) -> typing.Any:
    """Return the declared default of ``field``, or None when it has none.

    Pydantic v2 reports "no default" via the ``PydanticUndefined`` sentinel,
    which is normalized to None here.
    """
    try:
        value = field.get_default()  # type: ignore # Pydantic < v1.10.15
    except Exception:
        # Older pydantic v1 releases lack get_default(); read the attribute.
        # (Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.)
        value = field.default
    if IS_PYDANTIC_V2:
        from pydantic_core import PydanticUndefined

        # PydanticUndefined is a singleton sentinel, so identity is the
        # correct comparison (== would call the value's own __eq__).
        if value is PydanticUndefined:
            return None
        return value
    return value
@@ -0,0 +1,254 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import collections
4
+ import inspect
5
+ import typing
6
+
7
+ import typing_extensions
8
+
9
+ import pydantic
10
+
11
+
12
class FieldMetadata:
    """Annotation payload attaching wire-format information to a field.

    Attach via ``typing.Annotated`` to give a TypedDict member an alias that
    is honored during (de)serialization, e.g.::

        class MyDict(TypedDict):
            field: typing.Annotated[str, FieldMetadata(alias="field_name")]

    serializes ``{"field": "value"}`` as ``{"field_name": "value"}``.
    """

    # The on-the-wire name the annotated field maps to.
    alias: str

    def __init__(self, *, alias: str) -> None:
        self.alias = alias
28
+
29
+
30
def convert_and_respect_annotation_metadata(
    *,
    object_: typing.Any,
    annotation: typing.Any,
    inner_type: typing.Optional[typing.Any] = None,
    direction: typing.Literal["read", "write"],
) -> typing.Any:
    """
    Respect the metadata annotations on a field, such as aliasing. This function effectively
    manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for
    TypedDicts, which cannot support aliasing out of the box, and can be extended for additional
    utilities, such as defaults.

    Parameters
    ----------
    object_ : typing.Any
        The dict/list/set/scalar value to rewrite.

    annotation : type
        The type we're looking to apply typing annotations from

    inner_type : typing.Optional[type]
        Element type used on recursive calls; defaults to ``annotation``.

    direction : typing.Literal["read", "write"]
        "read" maps alias keys -> field names; "write" maps field names -> aliases.

    Returns
    -------
    typing.Any
        A rewritten copy of ``object_`` (or ``object_`` unchanged when no
        annotation metadata applies).
    """

    if object_ is None:
        return None
    if inner_type is None:
        inner_type = annotation

    # Strip Annotated/NotRequired wrappers before inspecting the type.
    clean_type = _remove_annotations(inner_type)
    # Pydantic models
    if (
        inspect.isclass(clean_type)
        and issubclass(clean_type, pydantic.BaseModel)
        and isinstance(object_, typing.Mapping)
    ):
        return _convert_mapping(object_, clean_type, direction)
    # TypedDicts
    if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
        return _convert_mapping(object_, clean_type, direction)

    # If you're iterating on a string, do not bother to coerce it to a sequence.
    if not isinstance(object_, str):
        # Sets: recurse element-wise using the set's declared element type.
        if (
            typing_extensions.get_origin(clean_type) == typing.Set
            or typing_extensions.get_origin(clean_type) == set
            or clean_type == typing.Set
        ) and isinstance(object_, typing.Set):
            inner_type = typing_extensions.get_args(clean_type)[0]
            return {
                convert_and_respect_annotation_metadata(
                    object_=item,
                    annotation=annotation,
                    inner_type=inner_type,
                    direction=direction,
                )
                for item in object_
            }
        # Lists and generic sequences: recurse element-wise, preserving order.
        elif (
            (
                typing_extensions.get_origin(clean_type) == typing.List
                or typing_extensions.get_origin(clean_type) == list
                or clean_type == typing.List
            )
            and isinstance(object_, typing.List)
        ) or (
            (
                typing_extensions.get_origin(clean_type) == typing.Sequence
                or typing_extensions.get_origin(clean_type) == collections.abc.Sequence
                or clean_type == typing.Sequence
            )
            and isinstance(object_, typing.Sequence)
        ):
            inner_type = typing_extensions.get_args(clean_type)[0]
            return [
                convert_and_respect_annotation_metadata(
                    object_=item,
                    annotation=annotation,
                    inner_type=inner_type,
                    direction=direction,
                )
                for item in object_
            ]

    if typing_extensions.get_origin(clean_type) == typing.Union:
        # We should be able to ~relatively~ safely try to convert keys against all
        # member types in the union, the edge case here is if one member aliases a field
        # of the same name to a different name from another member
        # Or if another member aliases a field of the same name that another member does not.
        for member in typing_extensions.get_args(clean_type):
            object_ = convert_and_respect_annotation_metadata(
                object_=object_,
                annotation=annotation,
                inner_type=member,
                direction=direction,
            )
        return object_

    # NOTE(review): both branches below return object_ unchanged; the
    # annotated_type lookup appears to be a vestigial guard — confirm upstream.
    annotated_type = _get_annotation(annotation)
    if annotated_type is None:
        return object_

    # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.)
    # Then we can safely call it on the recursive conversion.
    return object_
138
+
139
+
140
def _convert_mapping(
    object_: typing.Mapping[str, object],
    expected_type: typing.Any,
    direction: typing.Literal["read", "write"],
) -> typing.Mapping[str, object]:
    """Rewrite the keys of ``object_`` per the alias metadata on ``expected_type``.

    For direction "read", wire-format alias keys become field names; for
    "write", field names become their aliases. Values are converted
    recursively against each field's annotated type.
    """
    converted_object: typing.Dict[str, object] = {}
    annotations = typing_extensions.get_type_hints(expected_type, include_extras=True)
    aliases_to_field_names = _get_alias_to_field_name(annotations)
    for key, value in object_.items():
        if direction == "read" and key in aliases_to_field_names:
            # Reading an aliased key: look up the hint under the field name.
            dealiased_key = aliases_to_field_names.get(key)
            if dealiased_key is not None:
                type_ = annotations.get(dealiased_key)
        else:
            type_ = annotations.get(key)
        # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map
        #
        # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias
        # then we can just pass the value through as is
        if type_ is None:
            converted_object[key] = value
        elif direction == "read" and key not in aliases_to_field_names:
            # Un-aliased key on read: keep the key, still convert the value.
            converted_object[key] = convert_and_respect_annotation_metadata(
                object_=value, annotation=type_, direction=direction
            )
        else:
            converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = (
                convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction)
            )
    return converted_object
170
+
171
+
172
def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]:
    """Return ``type_`` when it is ``Annotated[...]`` (unwrapping at most one
    ``NotRequired`` layer first); otherwise None."""
    origin = typing_extensions.get_origin(type_)
    if origin is None:
        return None

    if origin == typing_extensions.NotRequired:
        # Peel NotRequired[...] and inspect the wrapped type instead.
        type_ = typing_extensions.get_args(type_)[0]
        origin = typing_extensions.get_origin(type_)

    return type_ if origin == typing_extensions.Annotated else None
185
+
186
+
187
def _remove_annotations(type_: typing.Any) -> typing.Any:
    """Strip all ``Annotated[...]``/``NotRequired[...]`` wrappers from ``type_``."""
    origin = typing_extensions.get_origin(type_)
    # A plain (non-generic) type has no origin and nothing to strip;
    # `in` compares with == just as the original chained checks did.
    if origin in (typing_extensions.NotRequired, typing_extensions.Annotated):
        return _remove_annotations(typing_extensions.get_args(type_)[0])
    return type_
199
+
200
+
201
def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]:
    """Map serialized (alias) names to Python field names for ``type_``."""
    hints = typing_extensions.get_type_hints(type_, include_extras=True)
    return _get_alias_to_field_name(hints)
204
+
205
+
206
def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]:
    """Map Python field names to their serialized (alias) names for ``type_``."""
    hints = typing_extensions.get_type_hints(type_, include_extras=True)
    return _get_field_to_alias_name(hints)
209
+
210
+
211
def _get_alias_to_field_name(
    field_to_hint: typing.Dict[str, typing.Any],
) -> typing.Dict[str, str]:
    """Build an alias -> field-name map from hints carrying FieldMetadata."""
    return {
        alias: field
        for field, hint in field_to_hint.items()
        if (alias := _get_alias_from_type(hint)) is not None
    }
220
+
221
+
222
def _get_field_to_alias_name(
    field_to_hint: typing.Dict[str, typing.Any],
) -> typing.Dict[str, str]:
    """Build a field-name -> alias map from hints carrying FieldMetadata."""
    return {
        field: alias
        for field, hint in field_to_hint.items()
        if (alias := _get_alias_from_type(hint)) is not None
    }
231
+
232
+
233
def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]:
    """Return the FieldMetadata alias attached to ``type_``, if any."""
    annotated = _get_annotation(type_)
    if annotated is None:
        return None

    # The actual annotations are 1 onward, the first is the annotated type
    for metadata in typing_extensions.get_args(annotated)[1:]:
        if isinstance(metadata, FieldMetadata) and metadata.alias is not None:
            return metadata.alias
    return None
244
+
245
+
246
+ def _alias_key(
247
+ key: str,
248
+ type_: typing.Any,
249
+ direction: typing.Literal["read", "write"],
250
+ aliases_to_field_names: typing.Dict[str, str],
251
+ ) -> str:
252
+ if direction == "read":
253
+ return aliases_to_field_names.get(key, key)
254
+ return _get_alias_from_type(type_=type_) or key
methodwebscan/py.typed ADDED
File without changes
@@ -0,0 +1,54 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from . import common, fingerprint, fuzzpath, graphql, routes, webpagecapture
4
+ from .common import TlsVersion
5
+ from .fingerprint import Certificate, FingerprintReport, HttpHeaders, PublicKeyAlgorithm, SignatureAlgorithm, TlsInfo
6
+ from .fuzzpath import FuzzPathReport, UrlDetails
7
+ from .graphql import GraphQlData, GraphQlField, GraphQlQuery, GraphQlSchema, GraphQlSchemaData, GraphQlType
8
+ from .routes import (
9
+ ApiType,
10
+ OAuthFlow,
11
+ OAuthFlows,
12
+ Route,
13
+ RoutesReport,
14
+ SecurityRequirement,
15
+ SecurityScheme,
16
+ SecuritySchemeName,
17
+ SecuritySchemeType,
18
+ )
19
+ from .webpagecapture import WebpageCaptureReport, WebpageScreenshotReport
20
+
21
+ __all__ = [
22
+ "ApiType",
23
+ "Certificate",
24
+ "FingerprintReport",
25
+ "FuzzPathReport",
26
+ "GraphQlData",
27
+ "GraphQlField",
28
+ "GraphQlQuery",
29
+ "GraphQlSchema",
30
+ "GraphQlSchemaData",
31
+ "GraphQlType",
32
+ "HttpHeaders",
33
+ "OAuthFlow",
34
+ "OAuthFlows",
35
+ "PublicKeyAlgorithm",
36
+ "Route",
37
+ "RoutesReport",
38
+ "SecurityRequirement",
39
+ "SecurityScheme",
40
+ "SecuritySchemeName",
41
+ "SecuritySchemeType",
42
+ "SignatureAlgorithm",
43
+ "TlsInfo",
44
+ "TlsVersion",
45
+ "UrlDetails",
46
+ "WebpageCaptureReport",
47
+ "WebpageScreenshotReport",
48
+ "common",
49
+ "fingerprint",
50
+ "fuzzpath",
51
+ "graphql",
52
+ "routes",
53
+ "webpagecapture",
54
+ ]
@@ -0,0 +1,5 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ from .tls_version import TlsVersion
4
+
5
+ __all__ = ["TlsVersion"]
@@ -0,0 +1,7 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+
5
# Known TLS/SSL protocol version identifiers. The trailing typing.Any keeps
# the union forward-compatible: values outside the listed literals still
# type-check instead of failing when new versions appear on the wire.
TlsVersion = typing.Union[
    typing.Literal["SSL10", "SSL20", "SSL30", "TLS10", "TLS11", "TLS12", "TLS13", "UNKNOWN"], typing.Any
]