audd 1.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- audd/__init__.py +46 -0
- audd/_callbacks.py +69 -0
- audd/_http.py +182 -0
- audd/_retry.py +150 -0
- audd/_source.py +99 -0
- audd/_user_agent.py +13 -0
- audd/_version.py +3 -0
- audd/advanced.py +86 -0
- audd/client.py +619 -0
- audd/custom_catalog.py +91 -0
- audd/errors.py +198 -0
- audd/longpoll.py +184 -0
- audd/models.py +408 -0
- audd/streams.py +284 -0
- audd-1.4.0.dist-info/METADATA +193 -0
- audd-1.4.0.dist-info/RECORD +18 -0
- audd-1.4.0.dist-info/WHEEL +4 -0
- audd-1.4.0.dist-info/licenses/LICENSE +21 -0
audd/__init__.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""Official Python SDK for the AudD music recognition API."""
|
|
2
|
+
from audd._version import __version__
|
|
3
|
+
from audd.client import AsyncAudD, AudD
|
|
4
|
+
from audd.errors import (
|
|
5
|
+
AudDAPIError,
|
|
6
|
+
AudDAuthenticationError,
|
|
7
|
+
AudDBlockedError,
|
|
8
|
+
AudDConnectionError,
|
|
9
|
+
AudDCustomCatalogAccessError,
|
|
10
|
+
AudDError,
|
|
11
|
+
AudDInvalidAudioError,
|
|
12
|
+
AudDInvalidRequestError,
|
|
13
|
+
AudDNeedsUpdateError,
|
|
14
|
+
AudDNotReleasedError,
|
|
15
|
+
AudDQuotaError,
|
|
16
|
+
AudDRateLimitError,
|
|
17
|
+
AudDSerializationError,
|
|
18
|
+
AudDServerError,
|
|
19
|
+
AudDStreamLimitError,
|
|
20
|
+
AudDSubscriptionError,
|
|
21
|
+
)
|
|
22
|
+
from audd.longpoll import AsyncLongpollConsumer, LongpollConsumer
|
|
23
|
+
|
|
24
|
+
# Public API surface. Kept alphabetical (dunder entries last) so additions
# produce minimal diffs; everything listed here is re-exported above.
__all__ = [
    "AsyncAudD",
    "AsyncLongpollConsumer",
    "AudD",
    "AudDAPIError",
    "AudDAuthenticationError",
    "AudDBlockedError",
    "AudDConnectionError",
    "AudDCustomCatalogAccessError",
    "AudDError",
    "AudDInvalidAudioError",
    "AudDInvalidRequestError",
    "AudDNeedsUpdateError",
    "AudDNotReleasedError",
    "AudDQuotaError",
    "AudDRateLimitError",
    "AudDSerializationError",
    "AudDServerError",
    "AudDStreamLimitError",
    "AudDSubscriptionError",
    "LongpollConsumer",
    "__version__",
]
|
audd/_callbacks.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""Pure helpers used by streams.* and callbacks. No HTTP, no SDK state."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import hashlib
|
|
5
|
+
from typing import Any
|
|
6
|
+
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
|
|
7
|
+
|
|
8
|
+
from audd.errors import AudDInvalidRequestError
|
|
9
|
+
from audd.models import StreamCallbackPayload
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class DuplicateReturnParameterError(AudDInvalidRequestError):
    """Conflicting intent: the callback URL already carries ``?return=...``
    while an explicit ``return_metadata`` argument was also supplied to
    streams.set_callback_url."""

    def __init__(self) -> None:
        detail = (
            "URL already contains a `return` query parameter; pass return_metadata=None "
            "or remove the parameter from the URL — refusing to silently overwrite."
        )
        # Client-side validation error: no HTTP exchange happened, so the
        # error_code / http_status / request_id fields carry sentinel values.
        super().__init__(
            error_code=0,
            message=detail,
            http_status=0,
            request_id=None,
        )
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def derive_longpoll_category(api_token: str, radio_id: int) -> str:
    """Locally derive the 9-character longpoll category for a stream.

    Mirrors the formula from docs.audd.io/streams.md: hex-MD5 the api token,
    append the radio id rendered as a decimal string, hex-MD5 the result,
    then keep only the first 9 hex digits.
    """
    token_digest = hashlib.md5(api_token.encode("utf-8")).hexdigest()
    combined = f"{token_digest}{radio_id}"
    return hashlib.md5(combined.encode("utf-8")).hexdigest()[:9]
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def parse_callback(body: dict[str, Any]) -> StreamCallbackPayload:
    """Convert a callback POST body (already JSON-decoded) into a typed payload."""
    payload = StreamCallbackPayload.parse(body)
    return payload
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def add_return_to_url(
    url: str,
    return_metadata: str | list[str] | None,
) -> str:
    """Merge a ``return=<metadata>`` query parameter into the callback URL.

    Passing None leaves the URL untouched. A URL that already carries a
    ``return`` parameter raises DuplicateReturnParameterError rather than
    silently clobbering the caller's value.
    """
    if return_metadata is None:
        return url

    # Lists become the API's comma-separated form.
    if isinstance(return_metadata, list):
        metadata = ",".join(return_metadata)
    else:
        metadata = return_metadata

    parts = urlparse(url)
    query = parse_qs(parts.query, keep_blank_values=True)
    if "return" in query:
        raise DuplicateReturnParameterError()
    query["return"] = [metadata]
    # Flatten {key: [values]} back into ordered pairs before re-encoding.
    pairs = [(key, value) for key, values in query.items() for value in values]
    return urlunparse(parts._replace(query=urlencode(pairs)))
|
audd/_http.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
"""HTTP transport. Sync (HTTPClient) and async (AsyncHTTPClient) wrappers around httpx."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import threading
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import httpx
|
|
10
|
+
|
|
11
|
+
from audd._user_agent import user_agent
|
|
12
|
+
|
|
13
|
+
# Default per-phase timeouts for standard endpoints.
DEFAULT_TIMEOUTS = httpx.Timeout(connect=30.0, read=60.0, write=60.0, pool=30.0)
# Enterprise endpoints get hour-long read/write windows — presumably for
# long-running enterprise recognition uploads; confirm against API docs.
ENTERPRISE_TIMEOUTS = httpx.Timeout(connect=30.0, read=3600.0, write=3600.0, pool=30.0)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass(frozen=True)
class HTTPResponse:
    """Transport-level view of one API response.

    Frozen so a response can be passed through the retry/error layers
    without accidental mutation.
    """

    json_body: Any  # parsed JSON body, or None on parse failure
    http_status: int  # HTTP status code as returned by the server
    request_id: str | None  # x-request-id header, if present
    raw_text: str  # original response body text (for AudDSerializationError diagnostics)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _request_id(headers: Mapping[str, str]) -> str | None:
|
|
26
|
+
return headers.get("x-request-id") or headers.get("X-Request-ID")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class HTTPClient:
    """Synchronous transport wrapper around httpx.Client.

    Responsibilities: set the SDK User-Agent, inject the current api_token
    into every request, and normalize responses into HTTPResponse. The
    underlying httpx client is closed by close() only when this instance
    created it (i.e. the caller did not supply one).
    """

    def __init__(
        self,
        api_token: str,
        *,
        timeouts: httpx.Timeout = DEFAULT_TIMEOUTS,
        httpx_client: httpx.Client | None = None,
    ) -> None:
        # Lock guards token swaps (set_api_token) against concurrent readers.
        self._token_lock = threading.Lock()
        self._api_token = api_token
        # _owned: True when we created the httpx.Client and must close it.
        self._owned = httpx_client is None
        self._client = httpx_client or httpx.Client(
            timeout=timeouts,
            headers={"User-Agent": user_agent()},
        )
        # NOTE(review): httpx clients typically carry their own default
        # User-Agent, so this membership check may never be False for a
        # caller-supplied client — confirm whether the intent was to
        # always override.
        if not self._owned and "User-Agent" not in self._client.headers:
            self._client.headers["User-Agent"] = user_agent()

    def set_api_token(self, new_token: str) -> None:
        """Atomically swap the token used for subsequent requests."""
        with self._token_lock:
            self._api_token = new_token

    def _current_token(self) -> str:
        # Read under the same lock that set_api_token writes under.
        with self._token_lock:
            return self._api_token

    def post_form(
        self,
        url: str,
        data: dict[str, Any],
        *,
        timeout: httpx.Timeout | None = None,
        files: Mapping[str, Any] | None = None,
    ) -> HTTPResponse:
        """POST form data (multipart only when `files` is given) with api_token always included.

        The caller's `data` dict is copied, never mutated; any caller-supplied
        api_token key is overwritten with the client's current token.
        """
        full_data = dict(data)
        full_data["api_token"] = self._current_token()
        kwargs: dict[str, Any] = {}
        # Only pass kwargs explicitly provided, so the client defaults apply otherwise.
        if timeout is not None:
            kwargs["timeout"] = timeout
        if files is not None:
            kwargs["files"] = files
        r = self._client.post(url, data=full_data, **kwargs)
        return self._wrap(r)

    def get(
        self,
        url: str,
        params: dict[str, Any],
        *,
        timeout: httpx.Timeout | None = None,
    ) -> HTTPResponse:
        """GET with api_token added to the query params (caller's value wins via setdefault)."""
        full = dict(params)
        full.setdefault("api_token", self._current_token())
        kwargs: dict[str, Any] = {}
        if timeout is not None:
            kwargs["timeout"] = timeout
        r = self._client.get(url, params=full, **kwargs)
        return self._wrap(r)

    def _wrap(self, r: httpx.Response) -> HTTPResponse:
        """Normalize an httpx response; json_body is None when the body is not valid JSON."""
        try:
            body: Any = r.json()
        except Exception:
            body = None
        return HTTPResponse(
            json_body=body,
            http_status=r.status_code,
            request_id=_request_id(r.headers),
            raw_text=r.text,
        )

    def close(self) -> None:
        """Close the underlying httpx client, but only if we created it."""
        if self._owned:
            self._client.close()
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class AsyncHTTPClient:
    """Async transport wrapper around httpx.AsyncClient.

    Mirrors HTTPClient: sets the SDK User-Agent, injects the current
    api_token, and normalizes responses into HTTPResponse. The underlying
    client is closed by aclose() only when this instance created it.
    """

    def __init__(
        self,
        api_token: str,
        *,
        timeouts: httpx.Timeout = DEFAULT_TIMEOUTS,
        httpx_client: httpx.AsyncClient | None = None,
    ) -> None:
        # threading.Lock (not asyncio.Lock): set_api_token stays a plain sync
        # call, and the critical sections are tiny attribute reads/writes.
        self._token_lock = threading.Lock()
        self._api_token = api_token
        # _owned: True when we created the httpx.AsyncClient and must close it.
        self._owned = httpx_client is None
        self._client = httpx_client or httpx.AsyncClient(
            timeout=timeouts,
            headers={"User-Agent": user_agent()},
        )
        # NOTE(review): same caveat as HTTPClient — a caller-supplied client
        # likely already has a default User-Agent, so this may never trigger.
        if not self._owned and "User-Agent" not in self._client.headers:
            self._client.headers["User-Agent"] = user_agent()

    def set_api_token(self, new_token: str) -> None:
        """Atomically swap the token used for subsequent requests."""
        with self._token_lock:
            self._api_token = new_token

    def _current_token(self) -> str:
        # Read under the same lock that set_api_token writes under.
        with self._token_lock:
            return self._api_token

    async def post_form(
        self,
        url: str,
        data: dict[str, Any],
        *,
        timeout: httpx.Timeout | None = None,
        files: Mapping[str, Any] | None = None,
    ) -> HTTPResponse:
        """POST form data (multipart only when `files` is given) with api_token always included."""
        full_data = dict(data)
        full_data["api_token"] = self._current_token()
        kwargs: dict[str, Any] = {}
        # Only pass kwargs explicitly provided, so the client defaults apply otherwise.
        if timeout is not None:
            kwargs["timeout"] = timeout
        if files is not None:
            kwargs["files"] = files
        r = await self._client.post(url, data=full_data, **kwargs)
        return _wrap_async(r)

    async def get(
        self,
        url: str,
        params: dict[str, Any],
        *,
        timeout: httpx.Timeout | None = None,
    ) -> HTTPResponse:
        """GET with api_token added to the query params (caller's value wins via setdefault)."""
        full = dict(params)
        full.setdefault("api_token", self._current_token())
        kwargs: dict[str, Any] = {}
        if timeout is not None:
            kwargs["timeout"] = timeout
        r = await self._client.get(url, params=full, **kwargs)
        return _wrap_async(r)

    async def aclose(self) -> None:
        """Close the underlying httpx client, but only if we created it."""
        if self._owned:
            await self._client.aclose()
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _wrap_async(r: httpx.Response) -> HTTPResponse:
    """Normalize an httpx response into HTTPResponse (json_body=None on parse failure)."""
    try:
        parsed: Any = r.json()
    except Exception:
        # Any decode failure: callers can inspect raw_text instead.
        parsed = None
    return HTTPResponse(
        json_body=parsed,
        http_status=r.status_code,
        request_id=_request_id(r.headers),
        raw_text=r.text,
    )
|
audd/_retry.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Cost-aware retry policy."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import asyncio
|
|
5
|
+
import enum
|
|
6
|
+
import random
|
|
7
|
+
import time
|
|
8
|
+
from collections.abc import Awaitable
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from typing import Callable, TypeVar
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
|
|
14
|
+
from audd._http import HTTPResponse
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class RetryClass(enum.Enum):
    """Determines which conditions are retryable for a given endpoint.

    READ — idempotent reads (streams.list, streams.get_callback_url):
        retry on 408/429/5xx + any connection error.
    RECOGNITION — recognize, recognize_enterprise, advanced.find_lyrics:
        retry on pre-upload connection failures + 5xx.
        DO NOT retry on read-timeout-after-upload (cost protection).
    MUTATING — streams.set_callback_url, streams.add, streams.delete, etc.,
        custom_catalog.add: retry only on pre-upload connection failures.
        DO NOT retry 5xx (the side effect may have happened).
    """

    READ = "read"  # safe to retry on any transport failure or retryable status
    RECOGNITION = "recognition"  # retry only when the upload never completed, or on 5xx
    MUTATING = "mutating"  # retry only when the request never reached the server
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass(frozen=True)
class RetryPolicy:
    """Retry configuration: which class of conditions to retry, and how often/fast."""

    retry_class: RetryClass  # see RetryClass for what each class retries
    max_attempts: int = 3  # total attempts, including the first request
    backoff_factor: float = 0.5  # base delay in seconds, doubled each attempt
    backoff_max: float = 30.0  # upper bound (seconds) on the backoff delay
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
T = TypeVar("T")  # NOTE(review): appears unused within this module — confirm no external importers before removing
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _sync_sleep(seconds: float) -> None:
    """Blocking sleep, isolated as a seam (symmetric with _async_sleep)."""
    time.sleep(seconds)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def _async_sleep(seconds: float) -> None:
    """Non-blocking sleep, isolated as a seam (symmetric with _sync_sleep)."""
    await asyncio.sleep(seconds)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _backoff_delay(attempt: int, policy: RetryPolicy) -> float:
|
|
55
|
+
base = min(policy.backoff_factor * (2**attempt), policy.backoff_max)
|
|
56
|
+
jitter = 0.5 + random.random()
|
|
57
|
+
return float(base * jitter)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _is_pre_upload_connection_error(exc: BaseException) -> bool:
    """Errors raised before the request body finished uploading (safe to retry)."""
    pre_upload_types = (httpx.ConnectError, httpx.ConnectTimeout, httpx.WriteError)
    return isinstance(exc, pre_upload_types)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
# HTTP status codes the retry policy keys on.
_HTTP_REQUEST_TIMEOUT = 408
_HTTP_TOO_MANY_REQUESTS = 429
_HTTP_SERVER_ERROR_FLOOR = 500  # any status >= 500 is treated as a server error
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def _should_retry_response(resp: HTTPResponse, retry_class: RetryClass) -> bool:
    """Decide whether a completed HTTP response warrants another attempt."""
    status = resp.http_status
    if retry_class == RetryClass.MUTATING:
        # The server saw the request; the side effect may already exist.
        return False
    if retry_class == RetryClass.RECOGNITION:
        return status >= _HTTP_SERVER_ERROR_FLOOR
    if retry_class == RetryClass.READ:
        return (
            status in (_HTTP_REQUEST_TIMEOUT, _HTTP_TOO_MANY_REQUESTS)
            or status >= _HTTP_SERVER_ERROR_FLOOR
        )
    raise AssertionError(f"unhandled RetryClass {retry_class!r}")
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _should_retry_exception(exc: BaseException, retry_class: RetryClass) -> bool:
    """Decide whether a raised exception warrants another attempt."""
    if retry_class == RetryClass.READ:
        # Idempotent reads: any transport-level failure is safe to retry.
        return isinstance(exc, httpx.RequestError)
    if retry_class in (RetryClass.RECOGNITION, RetryClass.MUTATING):
        # Only failures that occurred before the body finished uploading.
        return _is_pre_upload_connection_error(exc)
    raise AssertionError(f"unhandled RetryClass {retry_class!r}")
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def retry_sync(fn: Callable[[], HTTPResponse], policy: RetryPolicy) -> HTTPResponse:
    """Call `fn` up to policy.max_attempts times, retrying per policy.retry_class.

    Non-retryable exceptions propagate immediately; retryable ones are
    re-raised once attempts are exhausted. Non-retryable responses are
    returned as-is; a retryable response is returned only after the final
    attempt. Backoff with jitter is applied between attempts.
    """
    last_exc: BaseException | None = None
    last_resp: HTTPResponse | None = None
    for attempt in range(policy.max_attempts):
        try:
            resp = fn()
        # Broad catch is deliberate: _should_retry_exception immediately
        # re-raises anything that isn't a retryable transport error.
        except BaseException as exc:
            last_exc = exc
            last_resp = None
            if not _should_retry_exception(exc, policy.retry_class):
                raise
            if attempt + 1 >= policy.max_attempts:
                raise
            _sync_sleep(_backoff_delay(attempt, policy))
            continue

        if not _should_retry_response(resp, policy.retry_class):
            return resp
        last_resp = resp
        last_exc = None
        if attempt + 1 >= policy.max_attempts:
            return resp
        _sync_sleep(_backoff_delay(attempt, policy))
    # Defensive tail: the loop always returns or raises on its final
    # iteration, so this is reachable only if max_attempts < 1.
    if last_resp is not None:
        return last_resp
    assert last_exc is not None
    raise last_exc
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
async def retry_async(
    fn: Callable[[], Awaitable[HTTPResponse]],
    policy: RetryPolicy,
) -> HTTPResponse:
    """Async twin of retry_sync: await `fn` up to policy.max_attempts times.

    Semantics are identical to retry_sync, with asyncio.sleep-based backoff.
    """
    last_exc: BaseException | None = None
    last_resp: HTTPResponse | None = None
    for attempt in range(policy.max_attempts):
        try:
            resp = await fn()
        # Broad catch is deliberate: _should_retry_exception immediately
        # re-raises anything that isn't a retryable transport error.
        except BaseException as exc:
            last_exc = exc
            last_resp = None
            if not _should_retry_exception(exc, policy.retry_class):
                raise
            if attempt + 1 >= policy.max_attempts:
                raise
            await _async_sleep(_backoff_delay(attempt, policy))
            continue

        if not _should_retry_response(resp, policy.retry_class):
            return resp
        last_resp = resp
        last_exc = None
        if attempt + 1 >= policy.max_attempts:
            return resp
        await _async_sleep(_backoff_delay(attempt, policy))
    # Defensive tail: the loop always returns or raises on its final
    # iteration, so this is reachable only if max_attempts < 1.
    if last_resp is not None:
        return last_resp
    assert last_exc is not None
    raise last_exc
|
audd/_source.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""Auto-detect what kind of audio source the caller passed and convert to the
|
|
2
|
+
right multipart fields.
|
|
3
|
+
|
|
4
|
+
We return a *re-opener* — a 0-arg callable that yields fresh form-data on
|
|
5
|
+
each call. The client invokes it inside the retry-wrapped request closure.
|
|
6
|
+
|
|
7
|
+
httpx specifically auto-seeks file handles between `post()` calls, so a
|
|
8
|
+
naive once-open implementation would not actually break in our case. The
|
|
9
|
+
re-opener is defensive: (a) it doesn't depend on any specific HTTP-library
|
|
10
|
+
behavior, (b) it raises cleanly on unseekable streams when a retry is
|
|
11
|
+
attempted (rather than silently sending an empty body), and (c) sibling
|
|
12
|
+
SDKs in other languages must follow this pattern because their HTTP
|
|
13
|
+
libraries may not auto-seek. The shape stays consistent across the family.
|
|
14
|
+
"""
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
from io import IOBase
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any, Callable, Optional, Union
|
|
20
|
+
|
|
21
|
+
# Source: URL string, filesystem path (str or Path), file-like, or raw bytes.
|
|
22
|
+
Source = Union[str, Path, IOBase, bytes, bytearray]

# Output shape: (form_data, files_or_None). `files` is None for URL sources.
PreparedRequest = tuple[dict[str, Any], Optional[dict[str, Any]]]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _looks_like_url(s: str) -> bool:
|
|
29
|
+
return s.startswith("http://") or s.startswith("https://")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def prepare_source(source: Any) -> Callable[[], PreparedRequest]:
    """Return a re-opener: a 0-arg callable that yields fresh (data, files) on each call.

    URLs go in `data["url"]`; paths/file-likes/bytes go in `files["file"]`.

    For file paths and bytes, each call returns a fresh handle/buffer so that
    retried requests don't read from an exhausted source. For file-like
    objects (IOBase), each call seeks back to the original position before
    returning, if the object is seekable; if not, retrying that source
    raises immediately on attempt 2 (we'd send zero bytes otherwise).
    """
    # URL source: cheap, no body re-creation needed.
    if isinstance(source, str) and _looks_like_url(source):
        url = source
        def _do_url() -> PreparedRequest:
            return ({"url": url}, None)
        return _do_url

    # Filesystem path (str or Path): open a fresh handle each attempt.
    if isinstance(source, (str, Path)):
        path = Path(source)
        if isinstance(source, str) and not path.exists():
            # User probably mistyped a URL — give them a hint instead of FileNotFoundError.
            raise TypeError(
                f"{source!r} is not an HTTP URL (must start with http:// or https://) "
                f"and is not an existing file path. Pass a URL, a Path, a file-like, or bytes."
            )

        def _do_path() -> PreparedRequest:
            # NOTE(review): handles opened here are never explicitly closed;
            # presumably the HTTP layer consumes and releases them — confirm.
            return ({}, {"file": (path.name, path.open("rb"), "application/octet-stream")})
        return _do_path

    # Raw bytes: each attempt sends a copy.
    if isinstance(source, (bytes, bytearray)):
        buf = bytes(source)
        def _do_bytes() -> PreparedRequest:
            return ({}, {"file": ("upload.bin", buf, "application/octet-stream")})
        return _do_bytes

    # File-like object: seek back to the original position on each attempt.
    if hasattr(source, "read"):
        fl: Any = source
        name = getattr(fl, "name", "upload.bin")
        # NOTE(review): tell() succeeding is used as a proxy for seekability;
        # some unseekable streams implement tell() — consider fl.seekable().
        try:
            start = fl.tell()
            seekable = True
        except (AttributeError, OSError):
            start = None
            seekable = False
        # One-element list acts as a mutable flag captured by the closure.
        first_call = [True]

        def _do_filelike() -> PreparedRequest:
            if first_call[0]:
                first_call[0] = False
            else:
                # Second or later call means a retry: rewind, or fail loudly.
                if not seekable or start is None:
                    raise RuntimeError(
                        "Cannot retry an unseekable file-like source. Pass bytes "
                        "(buffer the content yourself) or use a Path / URL."
                    )
                fl.seek(start)
            return ({}, {"file": (name, fl, "application/octet-stream")})
        return _do_filelike

    raise TypeError(
        f"Unsupported source type {type(source).__name__}; "
        "pass a URL string, a path (str or Path), a file-like object, or bytes."
    )
|
audd/_user_agent.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""Build the User-Agent string sent on every request."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import platform
|
|
5
|
+
import sys
|
|
6
|
+
|
|
7
|
+
from audd._version import __version__
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def user_agent() -> str:
    """Return e.g. 'audd-python/0.1.0 python/3.12.1 (linux)'."""
    major, minor, micro = sys.version_info[:3]
    runtime = f"{major}.{minor}.{micro}"
    os_name = platform.system().lower()
    return f"audd-python/{__version__} python/{runtime} ({os_name})"
|
audd/_version.py
ADDED
audd/advanced.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""Advanced namespace — lyrics search + raw escape hatch.
|
|
2
|
+
|
|
3
|
+
Reach this only via audd.advanced.* — deliberately not on the main client.
|
|
4
|
+
"""
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import httpx
|
|
10
|
+
|
|
11
|
+
from audd._http import AsyncHTTPClient, HTTPClient
|
|
12
|
+
from audd._retry import RetryPolicy, retry_async, retry_sync
|
|
13
|
+
from audd.errors import (
|
|
14
|
+
AudDConnectionError,
|
|
15
|
+
AudDSerializationError,
|
|
16
|
+
raise_from_error_response,
|
|
17
|
+
)
|
|
18
|
+
from audd.models import LyricsResult
|
|
19
|
+
|
|
20
|
+
API_BASE = "https://api.audd.io"  # no trailing slash; requests go to f"{API_BASE}/{method}/"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class Advanced:
    """Advanced namespace: lyrics search plus a raw escape hatch.

    Reached only via audd.advanced.* — deliberately kept off the main client.
    """

    def __init__(self, http: HTTPClient, read: RetryPolicy) -> None:
        self._http = http
        self._read = read

    def find_lyrics(self, query: str) -> list[LyricsResult]:
        """Search lyrics by free-text query; returns typed results (possibly empty)."""
        body = self.raw_request("findLyrics", {"q": query})
        if body.get("status") == "error":
            # NOTE(review): http_status=200 is hardcoded here; the real status
            # is not exposed by raw_request — confirm this is intentional.
            raise_from_error_response(body, http_status=200, request_id=None)
        results = body.get("result") or []
        return [LyricsResult.model_validate(item) for item in results]

    def raw_request(
        self,
        method: str,
        params: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """POST to any AudD endpoint by method name; return the raw JSON dict.

        Useful for endpoints not yet wrapped by typed methods on this SDK.
        """
        payload = dict(params or {})
        endpoint = f"{API_BASE}/{method}/"

        def _do() -> Any:
            return self._http.post_form(endpoint, data=payload)

        try:
            resp = retry_sync(_do, self._read)
        except httpx.RequestError as exc:
            raise AudDConnectionError(str(exc), original=exc) from exc
        body = resp.json_body
        if isinstance(body, dict):
            return body
        raise AudDSerializationError("Unparseable response")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class AsyncAdvanced:
    """Async counterpart of Advanced: lyrics search plus a raw escape hatch."""

    def __init__(self, http: AsyncHTTPClient, read: RetryPolicy) -> None:
        self._http = http
        self._read = read

    async def find_lyrics(self, query: str) -> list[LyricsResult]:
        """Search lyrics by free-text query; returns typed results (possibly empty)."""
        body = await self.raw_request("findLyrics", {"q": query})
        if body.get("status") == "error":
            # NOTE(review): http_status=200 is hardcoded here; the real status
            # is not exposed by raw_request — confirm this is intentional.
            raise_from_error_response(body, http_status=200, request_id=None)
        results = body.get("result") or []
        return [LyricsResult.model_validate(item) for item in results]

    async def raw_request(
        self,
        method: str,
        params: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """POST to any AudD endpoint by method name; return the raw JSON dict."""
        payload = dict(params or {})
        endpoint = f"{API_BASE}/{method}/"

        async def _do() -> Any:
            return await self._http.post_form(endpoint, data=payload)

        try:
            resp = await retry_async(_do, self._read)
        except httpx.RequestError as exc:
            raise AudDConnectionError(str(exc), original=exc) from exc
        body = resp.json_body
        if isinstance(body, dict):
            return body
        raise AudDSerializationError("Unparseable response")
|