mm-http 0.1.0__tar.gz
- mm_http-0.1.0/.gitignore +16 -0
- mm_http-0.1.0/.pre-commit-config.yaml +10 -0
- mm_http-0.1.0/PKG-INFO +9 -0
- mm_http-0.1.0/README.md +197 -0
- mm_http-0.1.0/dict.dic +0 -0
- mm_http-0.1.0/justfile +40 -0
- mm_http-0.1.0/pyproject.toml +84 -0
- mm_http-0.1.0/src/mm_http/__init__.py +5 -0
- mm_http-0.1.0/src/mm_http/http_request.py +128 -0
- mm_http-0.1.0/src/mm_http/http_request_sync.py +63 -0
- mm_http-0.1.0/src/mm_http/py.typed +0 -0
- mm_http-0.1.0/src/mm_http/response.py +90 -0
- mm_http-0.1.0/tests/__init__.py +0 -0
- mm_http-0.1.0/tests/conftest.py +22 -0
- mm_http-0.1.0/tests/test_http_request.py +95 -0
- mm_http-0.1.0/tests/test_http_request_sync.py +95 -0
- mm_http-0.1.0/tests/test_http_response.py +108 -0
- mm_http-0.1.0/uv.lock +1021 -0
mm_http-0.1.0/.gitignore
ADDED
mm_http-0.1.0/PKG-INFO
ADDED
mm_http-0.1.0/README.md
ADDED
@@ -0,0 +1,197 @@
# mm-http

A simple and convenient HTTP client library for Python with both synchronous and asynchronous support.

## Features

- **Simple API** for one-off HTTP requests
- **Sync and Async** support with identical interfaces
- **JSON path navigation** with dot notation (`response.parse_json_body("user.profile.name")`)
- **Proxy support** (HTTP and SOCKS5)
- **Unified error handling**
- **Type-safe** with full type annotations
- **No sessions** - optimized for simple, stateless requests

## Quick Start

### Async Usage

```python
from mm_http import http_request

# Simple GET request
response = await http_request("https://api.github.com/users/octocat")
user_name = response.parse_json_body("name")  # Navigate JSON with dot notation

# POST with JSON data
response = await http_request(
    "https://httpbin.org/post",
    method="POST",
    json={"key": "value"},
    headers={"Authorization": "Bearer token"}
)

# With proxy
response = await http_request(
    "https://api.ipify.org?format=json",
    proxy="socks5://127.0.0.1:1080"
)
```

### Sync Usage

```python
from mm_http import http_request_sync

# Same API, but synchronous
response = http_request_sync("https://api.github.com/users/octocat")
user_name = response.parse_json_body("name")
```

## API Reference

### Functions

- `http_request(url, **kwargs)` - Async HTTP request
- `http_request_sync(url, **kwargs)` - Sync HTTP request

### Parameters

- `url: str` - Request URL
- `method: str = "GET"` - HTTP method
- `params: dict[str, Any] | None = None` - URL query parameters
- `data: dict[str, object] | None = None` - Form data
- `json: dict[str, object] | None = None` - JSON data
- `headers: dict[str, str] | None = None` - HTTP headers
- `cookies: LooseCookies | None = None` - Cookies
- `user_agent: str | None = None` - User-Agent header
- `proxy: str | None = None` - Proxy URL (supports http://, https://, socks4://, socks5://)
- `timeout: float | None = 10.0` - Request timeout in seconds
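
A minimal sketch combining several of these parameters in one call (the httpbin.org echo endpoint and the values are illustrative, not part of the library):

```python
response = await http_request(
    "https://httpbin.org/anything",  # assumed echo endpoint, for illustration only
    method="POST",
    params={"page": 1},      # appended to the URL as ?page=1
    data={"name": "alice"},  # sent as a form-encoded body
    user_agent="MyApp/1.0",  # becomes the User-Agent header
    timeout=5.0,             # seconds; None disables the total timeout
)
```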

### HttpResponse

```python
@dataclass
class HttpResponse:
    status_code: int | None
    error: HttpError | None
    error_message: str | None
    body: str | None
    headers: dict[str, str] | None

    def parse_json_body(self, path: str | None = None, none_on_error: bool = False) -> Any
    def is_err(self) -> bool

    @property
    def content_type(self) -> str | None

    def to_result_ok[T](self, value: T) -> Result[T]
    def to_result_err[T](self, error: str | Exception | tuple[str, Exception] | None = None) -> Result[T]
```
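
Fields and helpers can be inspected directly after a request; a short sketch (the URL and printed values are illustrative):

```python
response = await http_request("https://httpbin.org/json")

print(response.status_code)   # e.g. 200, or None if the request itself failed
print(response.content_type)  # case-insensitive Content-Type lookup (a property)
print(response.headers)       # plain dict[str, str] of response headers
data = response.parse_json_body()  # whole decoded JSON body when no path is given
```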

### Error Types

```python
class HttpError(str, Enum):
    TIMEOUT = "timeout"
    PROXY = "proxy"
    INVALID_URL = "invalid_url"
    CONNECTION = "connection"
    ERROR = "error"
```
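
The `error` field carries one of these values when a request fails before a status code is available. A sketch of branching on it (the import path is the defining module `mm_http.response`; the proxy address is made up):

```python
from mm_http import http_request
from mm_http.response import HttpError

response = await http_request("https://example.com", proxy="socks5://127.0.0.1:9999")

if response.error == HttpError.PROXY:
    print("proxy unreachable:", response.error_message)
elif response.error == HttpError.TIMEOUT:
    print("request timed out:", response.error_message)
elif response.error is not None:
    print("other failure:", response.error_message)
```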

## Examples

### JSON Path Navigation

```python
response = await http_request("https://api.github.com/users/octocat")

# Instead of: json.loads(response.body)["plan"]["name"]
plan_name = response.parse_json_body("plan.name")

# Safe navigation - returns None if path doesn't exist
followers = response.parse_json_body("followers_count")
nonexistent = response.parse_json_body("does.not.exist")  # Returns None
```
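
When the body is empty or not valid JSON, `parse_json_body` raises by default; passing `none_on_error=True` returns `None` instead (sketch, assuming an endpoint that returns HTML rather than JSON):

```python
response = await http_request("https://example.com")  # returns HTML, not JSON

title = response.parse_json_body("title", none_on_error=True)  # None instead of an exception
```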

### Error Handling

```python
response = await http_request("https://example.com", timeout=5.0)

if response.is_err():
    print(f"Request failed: {response.error} - {response.error_message}")
else:
    print(f"Success: {response.status_code}")
```

### Proxy Usage

```python
# HTTP proxy
response = await http_request(
    "https://httpbin.org/ip",
    proxy="http://proxy.example.com:8080"
)

# SOCKS5 proxy
response = await http_request(
    "https://httpbin.org/ip",
    proxy="socks5://127.0.0.1:1080"
)
```

### Custom Headers and User-Agent

```python
response = await http_request(
    "https://api.example.com/data",
    headers={
        "Authorization": "Bearer your-token",
        "Accept": "application/json"
    },
    user_agent="MyApp/1.0"
)
```

### Result Type Integration

For applications using the `Result[T]` pattern, `HttpResponse` provides convenient methods to convert responses into Result types:

```python
from mm_result import Result

async def get_user_id() -> Result[int]:
    response = await http_request("https://api.example.com/user")

    if response.is_err():
        return response.to_result_err()  # Convert error to Result[T]

    user_id = response.parse_json_body("id")
    return response.to_result_ok(user_id)  # Convert success to Result[T]

# Usage
result = await get_user_id()
if result.is_ok():
    print(f"User ID: {result.value}")
else:
    print(f"Error: {result.error}")
    print(f"HTTP details: {result.extra}")  # Contains full HTTP response data
```

**Result Methods:**
- `to_result_ok(value)` - Create `Result[T]` with success value, preserving HTTP details in `extra`
- `to_result_err(error?)` - Create `Result[T]` with error, preserving HTTP details in `extra`

## When to Use

**Use mm-http when you need:**
- Simple, one-off HTTP requests
- JSON API interactions with easy data access
- Proxy support for requests
- Unified sync/async interface

**Use requests/aiohttp directly when you need:**
- Session management and connection pooling
- Complex authentication flows
- Streaming responses
- Advanced HTTP features
- Custom retry logic

mm_http-0.1.0/dict.dic
ADDED
File without changes

mm_http-0.1.0/justfile
ADDED
@@ -0,0 +1,40 @@
```just
version := `uv run python -c 'import tomllib; print(tomllib.load(open("pyproject.toml", "rb"))["project"]["version"])'`


clean:
    rm -rf .pytest_cache .mypy_cache .ruff_cache .coverage dist build src/*.egg-info

build: clean
    uv build

format:
    uv run ruff check --select I --fix src tests
    uv run ruff format src tests

test:
    uv run pytest -n auto tests

lint: format pre-commit
    uv run ruff check src tests
    uv run mypy src

audit:
    uv export --no-dev --all-extras --format requirements-txt --no-emit-project > requirements.txt
    uv run pip-audit -r requirements.txt --disable-pip
    rm requirements.txt
    uv run bandit --silent --recursive --configfile "pyproject.toml" src

publish: build lint audit test
    git diff-index --quiet HEAD
    printf "Enter PyPI token: " && IFS= read -rs TOKEN && echo && uv publish --token "$TOKEN"
    git tag -a 'v{{version}}' -m 'v{{version}}'
    git push origin v{{version}}

sync:
    uv sync --all-extras

pre-commit:
    uv run pre-commit run --all-files

pre-commit-autoupdate:
    uv run pre-commit autoupdate
```

mm_http-0.1.0/pyproject.toml
ADDED
@@ -0,0 +1,84 @@
```toml
[project]
name = "mm-http"
version = "0.1.0"
description = ""
requires-python = ">=3.13"
dependencies = [
    "mm-result~=0.1.1",
    "requests[socks]~=2.32.4",
    "aiohttp~=3.12.12",
    "aiohttp-socks~=0.10.1",
    "pydash~=8.0.5",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.uv]
dev-dependencies = [
    "pytest~=8.4.0",
    "pytest-asyncio~=1.0.0",
    "pytest-xdist~=3.7.0",
    "pytest-httpserver~=1.1.3",
    "ruff~=0.11.13",
    "mypy~=1.16.0",
    "pip-audit~=2.9.0",
    "bandit~=1.8.3",
    "pre-commit~=4.2.0",
    "types-requests~=2.32.4.20250611",
    "python-dotenv~=1.1.0",
]

[tool.mypy]
python_version = "3.13"
warn_no_return = false
strict = true
exclude = ["^tests/", "^tmp/"]

[tool.ruff]
line-length = 130
target-version = "py313"
[tool.ruff.lint]
select = ["ALL"]
ignore = [
    "TC", # flake8-type-checking, TYPE_CHECKING is dangerous, for example it doesn't work with pydantic
    "A005", # flake8-builtins: stdlib-module-shadowing
    "ERA001", # eradicate: commented-out-code
    "PT", # flake8-pytest-style
    "D", # pydocstyle
    "FIX", # flake8-fixme
    "PLR0911", # pylint: too-many-return-statements
    "PLR0912", # pylint: too-many-branches
    "PLR0913", # pylint: too-many-arguments
    "PLR2004", # pylint: magic-value-comparison
    "PLC0414", # pylint: useless-import-alias
    "FBT", # flake8-boolean-trap
    "EM", # flake8-errmsg
    "TRY003", # tryceratops: raise-vanilla-args
    "C901", # mccabe: complex-structure
    "BLE001", # flake8-blind-except
    "S311", # bandit: suspicious-non-cryptographic-random-usage
    "TD002", # flake8-todos: missing-todo-author
    "TD003", # flake8-todos: missing-todo-link
    "RET503", # flake8-return: implicit-return
    "COM812", # it's used in ruff formatter
    "ASYNC109",
    "G004",
]
[tool.ruff.lint.pep8-naming]
classmethod-decorators = ["field_validator"]
[tool.ruff.lint.per-file-ignores]
"tests/*.py" = ["ANN", "S"]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"

[tool.bandit]
exclude_dirs = ["tests"]
skips = ["B311"]

[tool.pytest.ini_options]
markers = ["proxy: requires access proxies"]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
```

mm_http-0.1.0/src/mm_http/http_request.py
ADDED
@@ -0,0 +1,128 @@
```python
from typing import Any

import aiohttp
from aiohttp import ClientHttpProxyError, InvalidUrlClientError
from aiohttp.typedefs import LooseCookies
from aiohttp_socks import ProxyConnectionError, ProxyConnector
from multidict import CIMultiDictProxy

from .response import HttpError, HttpResponse


async def http_request(
    url: str,
    *,
    method: str = "GET",
    params: dict[str, Any] | None = None,
    data: dict[str, object] | None = None,
    json: dict[str, object] | None = None,
    headers: dict[str, str] | None = None,
    cookies: LooseCookies | None = None,
    user_agent: str | None = None,
    proxy: str | None = None,
    timeout: float | None = 10.0,
) -> HttpResponse:
    """
    Send an HTTP request and return the response.
    """
    timeout_ = aiohttp.ClientTimeout(total=timeout) if timeout else None
    if user_agent:
        if not headers:
            headers = {}
        headers["User-Agent"] = user_agent

    try:
        if proxy and proxy.startswith("socks"):
            return await _request_with_socks_proxy(
                url,
                method=method,
                params=params,
                data=data,
                json=json,
                headers=headers,
                cookies=cookies,
                proxy=proxy,
                timeout=timeout_,
            )
        return await _request_with_http_or_none_proxy(
            url,
            method=method,
            params=params,
            data=data,
            json=json,
            headers=headers,
            cookies=cookies,
            proxy=proxy,
            timeout=timeout_,
        )
    except TimeoutError as err:
        return HttpResponse(error=HttpError.TIMEOUT, error_message=str(err))
    except (aiohttp.ClientProxyConnectionError, ProxyConnectionError, ClientHttpProxyError) as err:
        return HttpResponse(error=HttpError.PROXY, error_message=str(err))
    except InvalidUrlClientError as e:
        return HttpResponse(error=HttpError.INVALID_URL, error_message=str(e))
    except Exception as err:
        return HttpResponse(error=HttpError.ERROR, error_message=str(err))


async def _request_with_http_or_none_proxy(
    url: str,
    *,
    method: str = "GET",
    params: dict[str, Any] | None = None,
    data: dict[str, object] | None = None,
    json: dict[str, object] | None = None,
    headers: dict[str, str] | None = None,
    cookies: LooseCookies | None = None,
    proxy: str | None = None,
    timeout: aiohttp.ClientTimeout | None,
) -> HttpResponse:
    async with aiohttp.request(
        method, url, params=params, data=data, json=json, headers=headers, cookies=cookies, proxy=proxy, timeout=timeout
    ) as res:
        return HttpResponse(
            status_code=res.status,
            error=None,
            error_message=None,
            body=(await res.read()).decode(),
            headers=headers_dict(res.headers),
        )


async def _request_with_socks_proxy(
    url: str,
    *,
    method: str = "GET",
    proxy: str,
    params: dict[str, Any] | None = None,
    data: dict[str, object] | None = None,
    json: dict[str, object] | None = None,
    headers: dict[str, str] | None = None,
    cookies: LooseCookies | None = None,
    timeout: aiohttp.ClientTimeout | None,
) -> HttpResponse:
    connector = ProxyConnector.from_url(proxy)
    async with (
        aiohttp.ClientSession(connector=connector) as session,
        session.request(
            method, url, params=params, data=data, json=json, headers=headers, cookies=cookies, timeout=timeout
        ) as res,
    ):
        return HttpResponse(
            status_code=res.status,
            error=None,
            error_message=None,
            body=(await res.read()).decode(),
            headers=headers_dict(res.headers),
        )


def headers_dict(headers: CIMultiDictProxy[str]) -> dict[str, str]:
    result: dict[str, str] = {}
    for key in headers:
        values = headers.getall(key)
        if len(values) == 1:
            result[key] = values[0]
        else:
            result[key] = ", ".join(values)
    return result
```
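
One detail worth noting in `headers_dict` above: repeated response headers (for example several `Set-Cookie` lines) are flattened into a single comma-joined string rather than dropped. A tiny illustration with invented values:

```python
from multidict import CIMultiDict, CIMultiDictProxy

from mm_http.http_request import headers_dict

raw = CIMultiDictProxy(CIMultiDict([("Set-Cookie", "a=1"), ("Set-Cookie", "b=2"), ("Server", "nginx")]))
print(headers_dict(raw))  # {'Set-Cookie': 'a=1, b=2', 'Server': 'nginx'}
```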

mm_http-0.1.0/src/mm_http/http_request_sync.py
ADDED
@@ -0,0 +1,63 @@
```python
from typing import Any

import requests
from requests.exceptions import InvalidSchema, MissingSchema, ProxyError

from .response import HttpError, HttpResponse


def http_request_sync(
    url: str,
    *,
    method: str = "GET",
    params: dict[str, Any] | None = None,
    data: dict[str, Any] | None = None,
    json: dict[str, Any] | None = None,
    headers: dict[str, Any] | None = None,
    cookies: dict[str, Any] | None = None,
    user_agent: str | None = None,
    proxy: str | None = None,
    timeout: float | None = 10.0,
) -> HttpResponse:
    """
    Send a synchronous HTTP request and return the response.
    """
    if user_agent:
        if headers is None:
            headers = {}
        headers["User-Agent"] = user_agent

    proxies: dict[str, str] | None = None
    if proxy:
        proxies = {
            "http": proxy,
            "https": proxy,
        }

    try:
        res = requests.request(
            method=method,
            url=url,
            params=params,
            data=data,
            json=json,
            headers=headers,
            cookies=cookies,
            timeout=timeout,
            proxies=proxies,
        )
        return HttpResponse(
            status_code=res.status_code,
            error=None,
            error_message=None,
            body=res.text,
            headers=dict(res.headers),
        )
    except requests.Timeout as e:
        return HttpResponse(error=HttpError.TIMEOUT, error_message=str(e))
    except ProxyError as e:
        return HttpResponse(error=HttpError.PROXY, error_message=str(e))
    except (InvalidSchema, MissingSchema) as e:
        return HttpResponse(error=HttpError.INVALID_URL, error_message=str(e))
    except Exception as e:
        return HttpResponse(error=HttpError.ERROR, error_message=str(e))
```

mm_http-0.1.0/src/mm_http/py.typed
ADDED
File without changes

mm_http-0.1.0/src/mm_http/response.py
ADDED
@@ -0,0 +1,90 @@
```python
from __future__ import annotations

import enum
import json
from dataclasses import dataclass
from typing import Any

import pydash
from mm_result import Result


@enum.unique
class HttpError(str, enum.Enum):
    TIMEOUT = "timeout"
    PROXY = "proxy"
    INVALID_URL = "invalid_url"
    CONNECTION = "connection"
    ERROR = "error"


@dataclass
class HttpResponse:
    """HTTP response with status, error, body, and headers."""

    status_code: int | None = None
    error: HttpError | None = None
    error_message: str | None = None
    body: str | None = None
    headers: dict[str, str] | None = None

    def parse_json_body(self, path: str | None = None, none_on_error: bool = False) -> Any:  # noqa: ANN401
        """Parse JSON body and optionally extract value by path."""
        if self.body is None:
            if none_on_error:
                return None
            raise ValueError("Body is None")

        try:
            res = json.loads(self.body)
            return pydash.get(res, path, None) if path else res
        except json.JSONDecodeError:
            if none_on_error:
                return None
            raise

    def is_err(self) -> bool:
        """Check if response represents an error (has error or status >= 400)."""
        return self.error is not None or (self.status_code is not None and self.status_code >= 400)

    def to_result_err[T](self, error: str | Exception | tuple[str, Exception] | None = None) -> Result[T]:
        """Create error Result[T] from HttpResponse."""
        return Result.err(error or self.error or "error", extra=self.to_dict())

    def to_result_ok[T](self, value: T) -> Result[T]:
        """Create success Result[T] from HttpResponse with given value."""
        return Result.ok(value, extra=self.to_dict())

    def to_dict(self) -> dict[str, Any]:
        """Convert HttpResponse to dictionary."""
        return {
            "status_code": self.status_code,
            "error": self.error.value if self.error else None,
            "error_message": self.error_message,
            "body": self.body,
            "headers": self.headers,
        }

    @property
    def content_type(self) -> str | None:
        """Get Content-Type header value (case-insensitive)."""
        if self.headers is None:
            return None
        for key in self.headers:
            if key.lower() == "content-type":
                return self.headers[key]
        return None

    def __repr__(self) -> str:
        parts: list[str] = []
        if self.status_code is not None:
            parts.append(f"status_code={self.status_code!r}")
        if self.error is not None:
            parts.append(f"error={self.error!r}")
        if self.error_message is not None:
            parts.append(f"error_message={self.error_message!r}")
        if self.body is not None:
            parts.append(f"body={self.body!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"HttpResponse({', '.join(parts)})"
```

mm_http-0.1.0/tests/__init__.py
ADDED
File without changes

mm_http-0.1.0/tests/conftest.py
ADDED
@@ -0,0 +1,22 @@
```python
import os

import pytest
from dotenv import load_dotenv

load_dotenv()


@pytest.fixture
def proxy_http() -> str:
    proxy = os.getenv("PROXY_HTTP")
    if not proxy:
        raise ValueError("PROXY_HTTP environment variable must be set")
    return proxy


@pytest.fixture
def proxy_socks5() -> str:
    proxy = os.getenv("PROXY_SOCKS5")
    if not proxy:
        raise ValueError("PROXY_SOCKS5 environment variable must be set")
    return proxy
```