methodnetworkscan-0.0.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- methodnetworkscan-0.0.7/PKG-INFO +25 -0
- methodnetworkscan-0.0.7/README.md +0 -0
- methodnetworkscan-0.0.7/pyproject.toml +56 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/__init__.py +23 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/core/__init__.py +25 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/core/datetime_utils.py +28 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/core/pydantic_utilities.py +206 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/core/serialization.py +170 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/py.typed +0 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/__init__.py +16 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/__init__.py +7 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/banner_grab.py +31 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/banner_grab_report.py +20 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/cookie_info.py +20 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/common/__init__.py +7 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/common/same_site_type.py +5 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/common/service_type.py +46 -0
- methodnetworkscan-0.0.7/src/methodnetworkscan/resources/common/transport_type.py +5 -0
methodnetworkscan-0.0.7/PKG-INFO

```diff
@@ -0,0 +1,25 @@
+Metadata-Version: 2.1
+Name: methodnetworkscan
+Version: 0.0.7
+Summary:
+Requires-Python: >=3.8,<4.0
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: MacOS
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: OS Independent
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Requires-Dist: pydantic (>=1.9.2)
+Requires-Dist: pydantic-core (>=2.18.2,<3.0.0)
+Description-Content-Type: text/markdown
+
+
```
methodnetworkscan-0.0.7/README.md: File without changes
methodnetworkscan-0.0.7/pyproject.toml

```diff
@@ -0,0 +1,56 @@
+[tool.poetry]
+name = "methodnetworkscan"
+version = "v0.0.7"
+description = ""
+readme = "README.md"
+authors = []
+keywords = []
+
+classifiers = [
+    "Intended Audience :: Developers",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Operating System :: OS Independent",
+    "Operating System :: POSIX",
+    "Operating System :: MacOS",
+    "Operating System :: POSIX :: Linux",
+    "Operating System :: Microsoft :: Windows",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+    "Typing :: Typed"
+]
+packages = [
+    { include = "methodnetworkscan", from = "src"}
+]
+
+[tool.poetry.dependencies]
+python = "^3.8"
+pydantic = ">= 1.9.2"
+pydantic-core = "^2.18.2"
+
+[tool.poetry.dev-dependencies]
+mypy = "1.0.1"
+pytest = "^7.4.0"
+pytest-asyncio = "^0.23.5"
+python-dateutil = "^2.9.0"
+types-python-dateutil = "^2.9.0.20240316"
+ruff = "^0.5.6"
+
+[tool.pytest.ini_options]
+testpaths = [ "tests" ]
+asyncio_mode = "auto"
+
+[tool.mypy]
+plugins = ["pydantic.mypy"]
+
+[tool.ruff]
+line-length = 120
+
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
```
methodnetworkscan-0.0.7/src/methodnetworkscan/__init__.py

```diff
@@ -0,0 +1,23 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .resources import (
+    BannerGrab,
+    BannerGrabReport,
+    CookieInfo,
+    SameSiteType,
+    ServiceType,
+    TransportType,
+    bannergrab,
+    common,
+)
+
+__all__ = [
+    "BannerGrab",
+    "BannerGrabReport",
+    "CookieInfo",
+    "SameSiteType",
+    "ServiceType",
+    "TransportType",
+    "bannergrab",
+    "common",
+]
```
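For quick reference, the package root simply re-exports the generated types listed in `__all__` above. A minimal import sketch, assuming the distribution is installed under its published name:

```python
# Everything below is re-exported from methodnetworkscan/__init__.py.
from methodnetworkscan import (
    BannerGrab,
    BannerGrabReport,
    CookieInfo,
    SameSiteType,
    ServiceType,
    TransportType,
)
```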
methodnetworkscan-0.0.7/src/methodnetworkscan/core/__init__.py

```diff
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .datetime_utils import serialize_datetime
+from .pydantic_utilities import (
+    IS_PYDANTIC_V2,
+    UniversalBaseModel,
+    UniversalRootModel,
+    parse_obj_as,
+    universal_field_validator,
+    universal_root_validator,
+    update_forward_refs,
+)
+from .serialization import FieldMetadata
+
+__all__ = [
+    "FieldMetadata",
+    "IS_PYDANTIC_V2",
+    "UniversalBaseModel",
+    "UniversalRootModel",
+    "parse_obj_as",
+    "serialize_datetime",
+    "universal_field_validator",
+    "universal_root_validator",
+    "update_forward_refs",
+]
```
methodnetworkscan-0.0.7/src/methodnetworkscan/core/datetime_utils.py

```diff
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+
+
+def serialize_datetime(v: dt.datetime) -> str:
+    """
+    Serialize a datetime including timezone info.
+
+    Uses the timezone info provided if present, otherwise uses the current runtime's timezone info.
+
+    UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00.
+    """
+
+    def _serialize_zoned_datetime(v: dt.datetime) -> str:
+        if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None):
+            # UTC is a special case where we use "Z" at the end instead of "+00:00"
+            return v.isoformat().replace("+00:00", "Z")
+        else:
+            # Delegate to the typical +/- offset format
+            return v.isoformat()
+
+    if v.tzinfo is not None:
+        return _serialize_zoned_datetime(v)
+    else:
+        local_tz = dt.datetime.now().astimezone().tzinfo
+        localized_dt = v.replace(tzinfo=local_tz)
+        return _serialize_zoned_datetime(localized_dt)
```
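For illustration, a small sketch of how `serialize_datetime` behaves for UTC, offset-aware, and naive datetimes; the exact output in the naive case depends on the local timezone of the runtime:

```python
import datetime as dt

from methodnetworkscan.core.datetime_utils import serialize_datetime

# UTC datetimes are rendered with a trailing "Z" instead of "+00:00".
print(serialize_datetime(dt.datetime(2024, 1, 2, 3, 4, 5, tzinfo=dt.timezone.utc)))
# -> 2024-01-02T03:04:05Z

# Other zones keep the usual +/-HH:MM offset form.
ist = dt.timezone(dt.timedelta(hours=5, minutes=30))
print(serialize_datetime(dt.datetime(2024, 1, 2, 3, 4, 5, tzinfo=ist)))
# -> 2024-01-02T03:04:05+05:30

# Naive datetimes are stamped with the runtime's local timezone before serializing,
# so the offset in this output depends on where the code runs.
print(serialize_datetime(dt.datetime(2024, 1, 2, 3, 4, 5)))
```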
methodnetworkscan-0.0.7/src/methodnetworkscan/core/pydantic_utilities.py

```diff
@@ -0,0 +1,206 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# nopycln: file
+import datetime as dt
+import typing
+from collections import defaultdict
+
+import typing_extensions
+
+import pydantic
+
+from .datetime_utils import serialize_datetime
+
+IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+
+if IS_PYDANTIC_V2:
+    # isort will try to reformat the comments on these imports, which breaks mypy
+    # isort: off
+    from pydantic.v1.datetime_parse import (  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
+        parse_date as parse_date,
+    )
+    from pydantic.v1.datetime_parse import (  # pyright: ignore[reportMissingImports] # Pydantic v2
+        parse_datetime as parse_datetime,
+    )
+    from pydantic.v1.json import (  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
+        ENCODERS_BY_TYPE as encoders_by_type,
+    )
+    from pydantic.v1.typing import (  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
+        get_args as get_args,
+    )
+    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
+        get_origin as get_origin,
+    )
+    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
+        is_literal_type as is_literal_type,
+    )
+    from pydantic.v1.typing import (  # pyright: ignore[reportMissingImports] # Pydantic v2
+        is_union as is_union,
+    )
+    from pydantic.v1.fields import ModelField as ModelField  # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
+else:
+    from pydantic.datetime_parse import parse_date as parse_date  # type: ignore # Pydantic v1
+    from pydantic.datetime_parse import parse_datetime as parse_datetime  # type: ignore # Pydantic v1
+    from pydantic.fields import ModelField as ModelField  # type: ignore # Pydantic v1
+    from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type  # type: ignore # Pydantic v1
+    from pydantic.typing import get_args as get_args  # type: ignore # Pydantic v1
+    from pydantic.typing import get_origin as get_origin  # type: ignore # Pydantic v1
+    from pydantic.typing import is_literal_type as is_literal_type  # type: ignore # Pydantic v1
+    from pydantic.typing import is_union as is_union  # type: ignore # Pydantic v1
+
+# isort: on
+
+
+T = typing.TypeVar("T")
+Model = typing.TypeVar("Model", bound=pydantic.BaseModel)
+
+
+def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
+    if IS_PYDANTIC_V2:
+        adapter = pydantic.TypeAdapter(type_)  # type: ignore # Pydantic v2
+        return adapter.validate_python(object_)
+    else:
+        return pydantic.parse_obj_as(type_, object_)
+
+
+def to_jsonable_with_fallback(
+    obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any]
+) -> typing.Any:
+    if IS_PYDANTIC_V2:
+        from pydantic_core import to_jsonable_python
+
+        return to_jsonable_python(obj, fallback=fallback_serializer)
+    else:
+        return fallback_serializer(obj)
+
+
+class UniversalBaseModel(pydantic.BaseModel):
+    class Config:
+        populate_by_name = True
+        smart_union = True
+        allow_population_by_field_name = True
+        json_encoders = {dt.datetime: serialize_datetime}
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {
+            "by_alias": True,
+            "exclude_unset": True,
+            **kwargs,
+        }
+        if IS_PYDANTIC_V2:
+            return super().model_dump_json(**kwargs_with_defaults)  # type: ignore # Pydantic v2
+        else:
+            return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        """
+        Override the default dict method to `exclude_unset` by default. This function patches
+        `exclude_unset` to work include fields within non-None default values.
+        """
+        _fields_set = self.__fields_set__
+
+        fields = _get_model_fields(self.__class__)
+        for name, field in fields.items():
+            if name not in _fields_set:
+                default = _get_field_default(field)
+
+                # If the default values are non-null act like they've been set
+                # This effectively allows exclude_unset to work like exclude_none where
+                # the latter passes through intentionally set none values.
+                if default != None:
+                    _fields_set.add(name)
+
+        kwargs_with_defaults_exclude_unset: typing.Any = {
+            "by_alias": True,
+            "exclude_unset": True,
+            "include": _fields_set,
+            **kwargs,
+        }
+
+        if IS_PYDANTIC_V2:
+            return super().model_dump(**kwargs_with_defaults_exclude_unset)  # type: ignore # Pydantic v2
+        else:
+            return super().dict(**kwargs_with_defaults_exclude_unset)
+
+
+if IS_PYDANTIC_V2:
+
+    class V2RootModel(UniversalBaseModel, pydantic.RootModel):  # type: ignore # Pydantic v2
+        pass
+
+    UniversalRootModel: typing_extensions.TypeAlias = V2RootModel  # type: ignore
+else:
+    UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel  # type: ignore
+
+
+def encode_by_type(o: typing.Any) -> typing.Any:
+    encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = (
+        defaultdict(tuple)
+    )
+    for type_, encoder in encoders_by_type.items():
+        encoders_by_class_tuples[encoder] += (type_,)
+
+    if type(o) in encoders_by_type:
+        return encoders_by_type[type(o)](o)
+    for encoder, classes_tuple in encoders_by_class_tuples.items():
+        if isinstance(o, classes_tuple):
+            return encoder(o)
+
+
+def update_forward_refs(model: typing.Type["Model"]) -> None:
+    if IS_PYDANTIC_V2:
+        model.model_rebuild(raise_errors=False)  # type: ignore # Pydantic v2
+    else:
+        model.update_forward_refs()
+
+
+# Mirrors Pydantic's internal typing
+AnyCallable = typing.Callable[..., typing.Any]
+
+
+def universal_root_validator(
+    pre: bool = False,
+) -> typing.Callable[[AnyCallable], AnyCallable]:
+    def decorator(func: AnyCallable) -> AnyCallable:
+        if IS_PYDANTIC_V2:
+            return pydantic.model_validator(mode="before" if pre else "after")(func)  # type: ignore # Pydantic v2
+        else:
+            return pydantic.root_validator(pre=pre)(func)  # type: ignore # Pydantic v1
+
+    return decorator
+
+
+def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]:
+    def decorator(func: AnyCallable) -> AnyCallable:
+        if IS_PYDANTIC_V2:
+            return pydantic.field_validator(field_name, mode="before" if pre else "after")(func)  # type: ignore # Pydantic v2
+        else:
+            return pydantic.validator(field_name, pre=pre)(func)  # type: ignore # Pydantic v1
+
+    return decorator
+
+
+PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo]
+
+
+def _get_model_fields(
+    model: typing.Type["Model"],
+) -> typing.Mapping[str, PydanticField]:
+    if IS_PYDANTIC_V2:
+        return model.model_fields  # type: ignore # Pydantic v2
+    else:
+        return model.__fields__  # type: ignore # Pydantic v1
+
+
+def _get_field_default(field: PydanticField) -> typing.Any:
+    try:
+        value = field.get_default()  # type: ignore # Pydantic < v1.10.15
+    except:
+        value = field.default
+    if IS_PYDANTIC_V2:
+        from pydantic_core import PydanticUndefined
+
+        if value == PydanticUndefined:
+            return None
+        return value
+    return value
```
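To show what this compatibility layer buys, here is a short sketch that runs unchanged against Pydantic v1 or v2: `parse_obj_as` dispatches to `TypeAdapter` on v2 and to `pydantic.parse_obj_as` on v1, and `UniversalBaseModel` defaults serialization to aliases with unset fields excluded. The `Widget` model is hypothetical and exists only to exercise the helpers:

```python
import typing

import pydantic

from methodnetworkscan.core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, parse_obj_as


class Widget(UniversalBaseModel):
    # Hypothetical model, used only for illustration; the alias mirrors how the
    # generated resource models map snake_case fields onto camelCase wire names.
    widget_id: str = pydantic.Field(alias="widgetId")
    label: typing.Optional[str] = None


# Works on both major Pydantic versions.
w = parse_obj_as(Widget, {"widgetId": "w-1"})

# json()/dict() default to by_alias=True and exclude_unset=True on v1 and v2 alike.
print(IS_PYDANTIC_V2, w.json())  # e.g. -> {"widgetId": "w-1"}
```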
methodnetworkscan-0.0.7/src/methodnetworkscan/core/serialization.py

```diff
@@ -0,0 +1,170 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import collections
+import typing
+
+import typing_extensions
+
+
+class FieldMetadata:
+    """
+    Metadata class used to annotate fields to provide additional information.
+
+    Example:
+    class MyDict(TypedDict):
+        field: typing.Annotated[str, FieldMetadata(alias="field_name")]
+
+    Will serialize: `{"field": "value"}`
+    To: `{"field_name": "value"}`
+    """
+
+    alias: str
+
+    def __init__(self, *, alias: str) -> None:
+        self.alias = alias
+
+
+def convert_and_respect_annotation_metadata(
+    *,
+    object_: typing.Any,
+    annotation: typing.Any,
+    inner_type: typing.Optional[typing.Any] = None,
+) -> typing.Any:
+    """
+    Respect the metadata annotations on a field, such as aliasing. This function effectively
+    manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for
+    TypedDicts, which cannot support aliasing out of the box, and can be extended for additional
+    utilities, such as defaults.
+
+    Parameters
+    ----------
+    object_ : typing.Any
+
+    annotation : type
+        The type we're looking to apply typing annotations from
+
+    inner_type : typing.Optional[type]
+
+    Returns
+    -------
+    typing.Any
+    """
+
+    if object_ is None:
+        return None
+    if inner_type is None:
+        inner_type = annotation
+
+    clean_type = _remove_annotations(inner_type)
+    if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
+        return _convert_typeddict(object_, clean_type)
+
+    if (
+        # If you're iterating on a string, do not bother to coerce it to a sequence.
+        (not isinstance(object_, str))
+        and (
+            (
+                (
+                    typing_extensions.get_origin(clean_type) == typing.List
+                    or typing_extensions.get_origin(clean_type) == list
+                    or clean_type == typing.List
+                )
+                and isinstance(object_, typing.List)
+            )
+            or (
+                (
+                    typing_extensions.get_origin(clean_type) == typing.Set
+                    or typing_extensions.get_origin(clean_type) == set
+                    or clean_type == typing.Set
+                )
+                and isinstance(object_, typing.Set)
+            )
+            or (
+                (
+                    typing_extensions.get_origin(clean_type) == typing.Sequence
+                    or typing_extensions.get_origin(clean_type) == collections.abc.Sequence
+                    or clean_type == typing.Sequence
+                )
+                and isinstance(object_, typing.Sequence)
+            )
+        )
+    ):
+        inner_type = typing_extensions.get_args(clean_type)[0]
+        return [
+            convert_and_respect_annotation_metadata(object_=item, annotation=annotation, inner_type=inner_type)
+            for item in object_
+        ]
+
+    if typing_extensions.get_origin(clean_type) == typing.Union:
+        # We should be able to ~relatively~ safely try to convert keys against all
+        # member types in the union, the edge case here is if one member aliases a field
+        # of the same name to a different name from another member
+        # Or if another member aliases a field of the same name that another member does not.
+        for member in typing_extensions.get_args(clean_type):
+            object_ = convert_and_respect_annotation_metadata(object_=object_, annotation=annotation, inner_type=member)
+        return object_
+
+    annotated_type = _get_annotation(annotation)
+    if annotated_type is None:
+        return object_
+
+    # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.)
+    # Then we can safely call it on the recursive conversion.
+    return object_
+
+
+def _convert_typeddict(object_: typing.Mapping[str, object], expected_type: typing.Any) -> typing.Mapping[str, object]:
+    converted_object: typing.Dict[str, object] = {}
+    annotations = typing_extensions.get_type_hints(expected_type, include_extras=True)
+    for key, value in object_.items():
+        type_ = annotations.get(key)
+        if type_ is None:
+            converted_object[key] = value
+        else:
+            converted_object[_alias_key(key, type_)] = convert_and_respect_annotation_metadata(
+                object_=value, annotation=type_
+            )
+    return converted_object
+
+
+def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]:
+    maybe_annotated_type = typing_extensions.get_origin(type_)
+    if maybe_annotated_type is None:
+        return None
+
+    if maybe_annotated_type == typing_extensions.NotRequired:
+        type_ = typing_extensions.get_args(type_)[0]
+        maybe_annotated_type = typing_extensions.get_origin(type_)
+
+    if maybe_annotated_type == typing_extensions.Annotated:
+        return type_
+
+    return None
+
+
+def _remove_annotations(type_: typing.Any) -> typing.Any:
+    maybe_annotated_type = typing_extensions.get_origin(type_)
+    if maybe_annotated_type is None:
+        return type_
+
+    if maybe_annotated_type == typing_extensions.NotRequired:
+        return _remove_annotations(typing_extensions.get_args(type_)[0])
+
+    if maybe_annotated_type == typing_extensions.Annotated:
+        return _remove_annotations(typing_extensions.get_args(type_)[0])
+
+    return type_
+
+
+def _alias_key(key: str, type_: typing.Any) -> str:
+    maybe_annotated_type = _get_annotation(type_)
+
+    if maybe_annotated_type is not None:
+        # The actual annotations are 1 onward, the first is the annotated type
+        annotations = typing_extensions.get_args(maybe_annotated_type)[1:]
+
+        for annotation in annotations:
+            if isinstance(annotation, FieldMetadata) and annotation.alias is not None:
+                return annotation.alias
+
+    return key
```
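A brief sketch of how `FieldMetadata` and `convert_and_respect_annotation_metadata` rename TypedDict keys to their wire aliases; `MyDict` mirrors the docstring example above and is illustrative only:

```python
import typing_extensions

from methodnetworkscan.core.serialization import FieldMetadata, convert_and_respect_annotation_metadata


class MyDict(typing_extensions.TypedDict):
    # TypedDicts cannot express aliases natively, so the wire name rides along
    # in the Annotated metadata and is applied at conversion time.
    field: typing_extensions.Annotated[str, FieldMetadata(alias="field_name")]


converted = convert_and_respect_annotation_metadata(
    object_={"field": "value"},
    annotation=MyDict,
)
print(converted)  # -> {"field_name": "value"}
```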
methodnetworkscan-0.0.7/src/methodnetworkscan/py.typed: File without changes
methodnetworkscan-0.0.7/src/methodnetworkscan/resources/__init__.py

```diff
@@ -0,0 +1,16 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from . import bannergrab, common
+from .bannergrab import BannerGrab, BannerGrabReport, CookieInfo
+from .common import SameSiteType, ServiceType, TransportType
+
+__all__ = [
+    "BannerGrab",
+    "BannerGrabReport",
+    "CookieInfo",
+    "SameSiteType",
+    "ServiceType",
+    "TransportType",
+    "bannergrab",
+    "common",
+]
```
methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/banner_grab.py

```diff
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ...core.pydantic_utilities import UniversalBaseModel
+from ..common.transport_type import TransportType
+from ..common.service_type import ServiceType
+import typing
+import pydantic
+from ..common.same_site_type import SameSiteType
+from ...core.pydantic_utilities import IS_PYDANTIC_V2
+
+
+class BannerGrab(UniversalBaseModel):
+    host: str
+    ip: str
+    port: int
+    tls: bool
+    version: str
+    transport: TransportType
+    service: ServiceType
+    status_code: typing.Optional[str] = pydantic.Field(alias="statusCode", default=None)
+    connection: typing.Optional[str] = None
+    content_type: typing.Optional[str] = pydantic.Field(alias="contentType", default=None)
+    same_site: typing.Optional[SameSiteType] = pydantic.Field(alias="sameSite", default=None)
+    metadata: typing.Optional[typing.Dict[str, str]] = None
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow")  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            extra = pydantic.Extra.allow
```
methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/banner_grab_report.py

```diff
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ...core.pydantic_utilities import UniversalBaseModel
+import typing
+from .banner_grab import BannerGrab
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2
+
+
+class BannerGrabReport(UniversalBaseModel):
+    target: str
+    banner_grabs: typing.Optional[typing.List[BannerGrab]] = pydantic.Field(alias="bannerGrabs", default=None)
+    errors: typing.Optional[typing.List[str]] = None
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow")  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            extra = pydantic.Extra.allow
```
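A hedged sketch of deserializing a scan result into these models using the package's own `parse_obj_as` helper. The payload below is made up; its camelCase keys (`bannerGrabs`, `statusCode`, `contentType`) map onto the aliased fields above, and the `"TCP"` transport value is an assumption since the TransportType literals are not shown in this section of the diff:

```python
from methodnetworkscan import BannerGrabReport
from methodnetworkscan.core.pydantic_utilities import parse_obj_as

# Hypothetical payload shaped like the models above, using the wire aliases.
payload = {
    "target": "example.com",
    "bannerGrabs": [
        {
            "host": "example.com",
            "ip": "198.51.100.10",
            "port": 443,
            "tls": True,
            "version": "HTTP/1.1",
            "transport": "TCP",      # assumed value; TransportType literals not shown here
            "service": "HTTPS",
            "statusCode": "200",
            "contentType": "text/html",
        }
    ],
    "errors": [],
}

report = parse_obj_as(BannerGrabReport, payload)
print(report.banner_grabs[0].status_code)  # -> "200"
```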
methodnetworkscan-0.0.7/src/methodnetworkscan/resources/bannergrab/cookie_info.py

```diff
@@ -0,0 +1,20 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ...core.pydantic_utilities import UniversalBaseModel
+import datetime as dt
+from ...core.pydantic_utilities import IS_PYDANTIC_V2
+import typing
+import pydantic
+
+
+class CookieInfo(UniversalBaseModel):
+    data: str
+    expiration: dt.datetime
+    samesite: bool
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow")  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            extra = pydantic.Extra.allow
```
methodnetworkscan-0.0.7/src/methodnetworkscan/resources/common/service_type.py

```diff
@@ -0,0 +1,46 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+ServiceType = typing.Union[
+    typing.Literal[
+        "HTTP",
+        "HTTPS",
+        "FTP",
+        "SFTP",
+        "SSH",
+        "TELNET",
+        "SMTP",
+        "SMTPS",
+        "POP3",
+        "POP3S",
+        "IMAP",
+        "IMAPS",
+        "DNS",
+        "SNMP",
+        "KAFKA",
+        "MQTT3",
+        "MQTT5",
+        "RDP",
+        "REDIS",
+        "LDAP",
+        "LDAPS",
+        "RSYNC",
+        "RPC",
+        "POSTGRESSQL",
+        "MYSQL",
+        "MSSQL",
+        "ORACLEDB",
+        "VNC",
+        "MODBUS",
+        "SMB",
+        "IPSEC",
+        "STUN",
+        "RTSP",
+        "DHCP",
+        "NTP",
+        "OPENVPN",
+        "UNKNOWN",
+    ],
+    typing.Any,
+]
```
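Because `ServiceType` is a union of a `Literal` and `typing.Any`, service names outside the literal list still validate instead of failing; a small sketch of checking membership explicitly, where `is_known_service` and `KNOWN_SERVICES` are hypothetical helpers rather than part of the package:

```python
import typing

from methodnetworkscan import ServiceType

# First union member is the Literal[...]; its args are the known service names.
KNOWN_SERVICES: typing.Tuple[str, ...] = typing.get_args(typing.get_args(ServiceType)[0])


def is_known_service(value: ServiceType) -> bool:
    # The trailing typing.Any member means unrecognized names (e.g. a future
    # protocol) are still accepted by the type, so membership is checked by hand.
    return value in KNOWN_SERVICES


print(is_known_service("HTTPS"))   # -> True
print(is_known_service("GOPHER"))  # -> False (valid ServiceType, but not in the literal list)
```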