repeaterbook 0.3.0__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- repeaterbook/__init__.py +10 -0
- repeaterbook/exceptions.py +44 -0
- repeaterbook/models.py +28 -1
- repeaterbook/services.py +97 -20
- {repeaterbook-0.3.0.dist-info → repeaterbook-0.4.0.dist-info}/METADATA +1 -1
- repeaterbook-0.4.0.dist-info/RECORD +12 -0
- repeaterbook-0.3.0.dist-info/RECORD +0 -11
- {repeaterbook-0.3.0.dist-info → repeaterbook-0.4.0.dist-info}/WHEEL +0 -0
- {repeaterbook-0.3.0.dist-info → repeaterbook-0.4.0.dist-info}/licenses/LICENSE +0 -0
repeaterbook/__init__.py
CHANGED
|
@@ -5,7 +5,17 @@ from __future__ import annotations
|
|
|
5
5
|
__all__: tuple[str, ...] = (
|
|
6
6
|
"Repeater",
|
|
7
7
|
"RepeaterBook",
|
|
8
|
+
"RepeaterBookAPIError",
|
|
9
|
+
"RepeaterBookCacheError",
|
|
10
|
+
"RepeaterBookError",
|
|
11
|
+
"RepeaterBookValidationError",
|
|
8
12
|
)
|
|
9
13
|
|
|
10
14
|
from repeaterbook.database import RepeaterBook
|
|
15
|
+
from repeaterbook.exceptions import (
|
|
16
|
+
RepeaterBookAPIError,
|
|
17
|
+
RepeaterBookCacheError,
|
|
18
|
+
RepeaterBookError,
|
|
19
|
+
RepeaterBookValidationError,
|
|
20
|
+
)
|
|
11
21
|
from repeaterbook.models import Repeater
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"""Custom exceptions for RepeaterBook library."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
__all__: tuple[str, ...] = (
|
|
6
|
+
"RepeaterBookAPIError",
|
|
7
|
+
"RepeaterBookCacheError",
|
|
8
|
+
"RepeaterBookError",
|
|
9
|
+
"RepeaterBookValidationError",
|
|
10
|
+
)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class RepeaterBookError(Exception):
|
|
14
|
+
"""Base exception for RepeaterBook library.
|
|
15
|
+
|
|
16
|
+
All RepeaterBook-specific exceptions inherit from this class,
|
|
17
|
+
making it easy to catch all library errors with a single except clause.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class RepeaterBookAPIError(RepeaterBookError):
|
|
22
|
+
"""Error returned by the RepeaterBook API.
|
|
23
|
+
|
|
24
|
+
Raised when the API returns an error response (status: "error").
|
|
25
|
+
The error message from the API is preserved in the exception message.
|
|
26
|
+
"""
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class RepeaterBookCacheError(RepeaterBookError):
|
|
30
|
+
"""Error during cache operations.
|
|
31
|
+
|
|
32
|
+
Raised when reading from or writing to the cache fails,
|
|
33
|
+
such as file permission issues or disk full errors.
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class RepeaterBookValidationError(RepeaterBookError):
|
|
38
|
+
"""Invalid data or response format.
|
|
39
|
+
|
|
40
|
+
Raised when:
|
|
41
|
+
- API response is not in expected format (not a dict)
|
|
42
|
+
- Required fields are missing from the response
|
|
43
|
+
- Data values fail validation (e.g., invalid coordinates)
|
|
44
|
+
"""
|
repeaterbook/models.py
CHANGED
|
@@ -34,6 +34,7 @@ from typing import Literal, TypeAlias, TypedDict
|
|
|
34
34
|
|
|
35
35
|
import attrs
|
|
36
36
|
from pycountry.db import Country # noqa: TC002
|
|
37
|
+
from pydantic import field_validator
|
|
37
38
|
from sqlmodel import Field, SQLModel
|
|
38
39
|
|
|
39
40
|
|
|
@@ -103,7 +104,6 @@ class Repeater(SQLModel, table=True):
|
|
|
103
104
|
races: str | None
|
|
104
105
|
skywarn: str | None
|
|
105
106
|
canwarn: str | None
|
|
106
|
-
#' operating_mode: str
|
|
107
107
|
allstar_node: str | None
|
|
108
108
|
echolink_node: str | None
|
|
109
109
|
irlp_node: str | None
|
|
@@ -129,6 +129,33 @@ class Repeater(SQLModel, table=True):
|
|
|
129
129
|
notes: str | None
|
|
130
130
|
last_update: date
|
|
131
131
|
|
|
132
|
+
@field_validator("latitude")
|
|
133
|
+
@classmethod
|
|
134
|
+
def validate_latitude(cls, v: Decimal) -> Decimal:
|
|
135
|
+
"""Validate latitude is within valid range."""
|
|
136
|
+
if not Decimal(-90) <= v <= Decimal(90):
|
|
137
|
+
msg = f"Latitude must be between -90 and 90, got {v}"
|
|
138
|
+
raise ValueError(msg)
|
|
139
|
+
return v
|
|
140
|
+
|
|
141
|
+
@field_validator("longitude")
|
|
142
|
+
@classmethod
|
|
143
|
+
def validate_longitude(cls, v: Decimal) -> Decimal:
|
|
144
|
+
"""Validate longitude is within valid range."""
|
|
145
|
+
if not Decimal(-180) <= v <= Decimal(180):
|
|
146
|
+
msg = f"Longitude must be between -180 and 180, got {v}"
|
|
147
|
+
raise ValueError(msg)
|
|
148
|
+
return v
|
|
149
|
+
|
|
150
|
+
@field_validator("frequency", "input_frequency")
|
|
151
|
+
@classmethod
|
|
152
|
+
def validate_frequency(cls, v: Decimal) -> Decimal:
|
|
153
|
+
"""Validate frequency is positive."""
|
|
154
|
+
if v <= 0:
|
|
155
|
+
msg = f"Frequency must be positive, got {v}"
|
|
156
|
+
raise ValueError(msg)
|
|
157
|
+
return v
|
|
158
|
+
|
|
132
159
|
|
|
133
160
|
ZeroOneJSON: TypeAlias = Literal[
|
|
134
161
|
0,
|
repeaterbook/services.py
CHANGED
|
@@ -16,7 +16,7 @@ import hashlib
|
|
|
16
16
|
import json
|
|
17
17
|
import time
|
|
18
18
|
from datetime import date, timedelta
|
|
19
|
-
from typing import Any, cast
|
|
19
|
+
from typing import Any, Final, cast
|
|
20
20
|
|
|
21
21
|
import aiohttp
|
|
22
22
|
import attrs
|
|
@@ -25,6 +25,10 @@ from loguru import logger
|
|
|
25
25
|
from tqdm import tqdm
|
|
26
26
|
from yarl import URL
|
|
27
27
|
|
|
28
|
+
from repeaterbook.exceptions import (
|
|
29
|
+
RepeaterBookAPIError,
|
|
30
|
+
RepeaterBookValidationError,
|
|
31
|
+
)
|
|
28
32
|
from repeaterbook.models import (
|
|
29
33
|
Emergency,
|
|
30
34
|
EmergencyJSON,
|
|
@@ -51,7 +55,7 @@ async def fetch_json(
|
|
|
51
55
|
cache_dir: Path | None = None,
|
|
52
56
|
max_cache_age: timedelta = timedelta(seconds=3600),
|
|
53
57
|
chunk_size: int = 1024,
|
|
54
|
-
) -> Any: # noqa: ANN401
|
|
58
|
+
) -> Any: # noqa: ANN401 - json.loads() returns Any; validation done by callers
|
|
55
59
|
"""Fetches JSON data from the specified URL using a streaming response.
|
|
56
60
|
|
|
57
61
|
- If a cached copy exists and is recent (not older than max_cache_age seconds) and
|
|
@@ -62,17 +66,19 @@ async def fetch_json(
|
|
|
62
66
|
# Create a unique filename for caching based on the URL hash.
|
|
63
67
|
if cache_dir is None:
|
|
64
68
|
cache_dir = Path()
|
|
65
|
-
hashed_url = hashlib.
|
|
69
|
+
hashed_url = hashlib.sha256(str(url).encode("utf-8")).hexdigest()
|
|
66
70
|
cache_file = cache_dir / f"api_cache_{hashed_url}.json"
|
|
71
|
+
temp_file = cache_dir / f"api_cache_{hashed_url}.tmp"
|
|
67
72
|
|
|
68
|
-
# Check if fresh cached data exists.
|
|
69
|
-
|
|
70
|
-
|
|
73
|
+
# Check if fresh cached data exists using a single stat call.
|
|
74
|
+
try:
|
|
75
|
+
stat = await cache_file.stat()
|
|
76
|
+
file_age = time.time() - stat.st_mtime
|
|
71
77
|
if file_age < max_cache_age.total_seconds():
|
|
72
78
|
logger.info("Using cached data.")
|
|
73
79
|
return json.loads(await cache_file.read_text(encoding="utf-8"))
|
|
74
|
-
|
|
75
|
-
|
|
80
|
+
except FileNotFoundError:
|
|
81
|
+
pass # Cache doesn't exist, continue to fetch
|
|
76
82
|
|
|
77
83
|
logger.info("Fetching new data from API...")
|
|
78
84
|
async with (
|
|
@@ -80,8 +86,8 @@ async def fetch_json(
|
|
|
80
86
|
session.get(url, headers=headers) as response,
|
|
81
87
|
):
|
|
82
88
|
response.raise_for_status()
|
|
83
|
-
#
|
|
84
|
-
async with await cache_file.open("wb") as f:
|
|
89
|
+
# Write to temp file first for atomic cache updates.
|
|
90
|
+
async with await temp_file.open("wb") as f:
|
|
85
91
|
with tqdm(
|
|
86
92
|
total=response.content_length,
|
|
87
93
|
unit="B",
|
|
@@ -91,6 +97,11 @@ async def fetch_json(
|
|
|
91
97
|
await f.write(chunk)
|
|
92
98
|
progress.update(len(chunk))
|
|
93
99
|
|
|
100
|
+
# Atomic rename from temp file to cache file.
|
|
101
|
+
# This prevents race conditions where concurrent requests might read
|
|
102
|
+
# a partially written cache file.
|
|
103
|
+
await temp_file.rename(cache_file)
|
|
104
|
+
|
|
94
105
|
# After saving the file, load and parse the JSON data.
|
|
95
106
|
return json.loads(await cache_file.read_text(encoding="utf-8"))
|
|
96
107
|
|
|
@@ -209,6 +220,16 @@ class RepeaterBookAPI:
|
|
|
209
220
|
"""RepeaterBook API client.
|
|
210
221
|
|
|
211
222
|
Must read https://www.repeaterbook.com/wiki/doku.php?id=api before using.
|
|
223
|
+
|
|
224
|
+
Attributes:
|
|
225
|
+
base_url: The RepeaterBook API base URL.
|
|
226
|
+
app_name: Application name for User-Agent header.
|
|
227
|
+
app_email: Contact email for User-Agent header.
|
|
228
|
+
working_dir: Directory for cache and database files.
|
|
229
|
+
max_cache_age: Maximum age of cached API responses before refresh.
|
|
230
|
+
Defaults to 1 hour.
|
|
231
|
+
max_count: Maximum expected results per API request. Used to warn
|
|
232
|
+
when response may have been trimmed. Defaults to 3500.
|
|
212
233
|
"""
|
|
213
234
|
|
|
214
235
|
base_url: URL = attrs.Factory(lambda: URL("https://repeaterbook.com"))
|
|
@@ -217,7 +238,8 @@ class RepeaterBookAPI:
|
|
|
217
238
|
|
|
218
239
|
working_dir: Path = attrs.Factory(Path)
|
|
219
240
|
|
|
220
|
-
|
|
241
|
+
max_cache_age: timedelta = timedelta(hours=1)
|
|
242
|
+
max_count: int = 3500
|
|
221
243
|
|
|
222
244
|
async def cache_dir(self) -> Path:
|
|
223
245
|
"""Cache directory for API responses."""
|
|
@@ -246,11 +268,26 @@ class RepeaterBookAPI:
|
|
|
246
268
|
"""Rest of world (not north-america) export URL."""
|
|
247
269
|
return self.url_api / "exportROW.php"
|
|
248
270
|
|
|
271
|
+
# North America countries served by export.php endpoint
|
|
272
|
+
NA_COUNTRIES: frozenset[str] = frozenset({
|
|
273
|
+
"United States",
|
|
274
|
+
"Canada",
|
|
275
|
+
"Mexico",
|
|
276
|
+
})
|
|
277
|
+
|
|
249
278
|
def urls_export(
|
|
250
279
|
self,
|
|
251
280
|
query: ExportQuery,
|
|
252
281
|
) -> set[URL]:
|
|
253
|
-
"""Generate export URLs for given query.
|
|
282
|
+
"""Generate export URLs for given query.
|
|
283
|
+
|
|
284
|
+
Smart routing logic:
|
|
285
|
+
- If NA-specific fields are used (state_id, county, emcomm, stype),
|
|
286
|
+
only query the NA endpoint
|
|
287
|
+
- If ROW-specific fields are used (region), only query the ROW endpoint
|
|
288
|
+
- If countries are specified, route based on whether they're NA or ROW
|
|
289
|
+
- If no routing hints, query both endpoints
|
|
290
|
+
"""
|
|
254
291
|
mode_map: dict[Mode, ModeJSON] = {
|
|
255
292
|
Mode.ANALOG: "analog",
|
|
256
293
|
Mode.DMR: "DMR",
|
|
@@ -268,6 +305,33 @@ class RepeaterBookAPI:
|
|
|
268
305
|
ServiceType.GMRS: "GMRS",
|
|
269
306
|
}
|
|
270
307
|
|
|
308
|
+
# Determine which endpoints to query based on the query parameters
|
|
309
|
+
has_na_specific = bool(
|
|
310
|
+
query.state_ids or query.counties or
|
|
311
|
+
query.emergency_services or query.service_types
|
|
312
|
+
)
|
|
313
|
+
has_row_specific = bool(query.regions)
|
|
314
|
+
|
|
315
|
+
# Check if countries are specified and categorize them
|
|
316
|
+
query_countries = {country.name for country in query.countries}
|
|
317
|
+
has_na_countries = bool(query_countries & self.NA_COUNTRIES)
|
|
318
|
+
has_row_countries = bool(query_countries - self.NA_COUNTRIES)
|
|
319
|
+
|
|
320
|
+
# Determine which endpoints to query
|
|
321
|
+
query_na_endpoint = True
|
|
322
|
+
query_row_endpoint = True
|
|
323
|
+
|
|
324
|
+
if has_na_specific and not has_row_specific:
|
|
325
|
+
# NA-specific fields used, only query NA
|
|
326
|
+
query_row_endpoint = False
|
|
327
|
+
elif has_row_specific and not has_na_specific:
|
|
328
|
+
# ROW-specific fields used, only query ROW
|
|
329
|
+
query_na_endpoint = False
|
|
330
|
+
elif query_countries:
|
|
331
|
+
# Countries specified - route based on country location
|
|
332
|
+
query_na_endpoint = has_na_countries
|
|
333
|
+
query_row_endpoint = has_row_countries
|
|
334
|
+
|
|
271
335
|
query_na = ExportNorthAmericaQuery(
|
|
272
336
|
callsign=list(query.callsigns),
|
|
273
337
|
city=list(query.cities),
|
|
@@ -280,6 +344,8 @@ class RepeaterBookAPI:
|
|
|
280
344
|
emcomm=[emergency_map[emergency] for emergency in query.emergency_services],
|
|
281
345
|
stype=[type_map[service_type] for service_type in query.service_types],
|
|
282
346
|
)
|
|
347
|
+
# Safe cast: dict comprehension preserves TypedDict structure, only removes
|
|
348
|
+
# empty values (which are optional in ExportNorthAmericaQuery).
|
|
283
349
|
query_na = cast(
|
|
284
350
|
"ExportNorthAmericaQuery", {k: v for k, v in query_na.items() if v}
|
|
285
351
|
)
|
|
@@ -293,14 +359,22 @@ class RepeaterBookAPI:
|
|
|
293
359
|
mode=[mode_map[mode] for mode in query.modes],
|
|
294
360
|
region=list(query.regions),
|
|
295
361
|
)
|
|
362
|
+
# Safe cast: dict comprehension preserves TypedDict structure, only removes
|
|
363
|
+
# empty values (which are optional in ExportWorldQuery).
|
|
296
364
|
query_world = cast(
|
|
297
365
|
"ExportWorldQuery", {k: v for k, v in query_world.items() if v}
|
|
298
366
|
)
|
|
299
367
|
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
368
|
+
# Safe casts: URL % operator expects dict[str, str], and TypedDict values
|
|
369
|
+
# are all list[str] which serialize correctly for query parameters.
|
|
370
|
+
urls: set[URL] = set()
|
|
371
|
+
if query_na_endpoint:
|
|
372
|
+
na_params = cast("dict[str, str]", query_na)
|
|
373
|
+
urls.add(self.url_export_north_america % na_params)
|
|
374
|
+
if query_row_endpoint:
|
|
375
|
+
row_params = cast("dict[str, str]", query_world)
|
|
376
|
+
urls.add(self.url_export_rest_of_world % row_params)
|
|
377
|
+
return urls
|
|
304
378
|
|
|
305
379
|
async def export_json(self, url: URL) -> ExportJSON:
|
|
306
380
|
"""Export data for given URL."""
|
|
@@ -308,20 +382,23 @@ class RepeaterBookAPI:
|
|
|
308
382
|
url,
|
|
309
383
|
headers={"User-Agent": f"{self.app_name} <{self.app_email}>"},
|
|
310
384
|
cache_dir=await self.cache_dir(),
|
|
385
|
+
max_cache_age=self.max_cache_age,
|
|
311
386
|
)
|
|
312
387
|
|
|
313
388
|
if not isinstance(data, dict):
|
|
314
|
-
|
|
389
|
+
msg = f"Expected dict response from API, got {type(data).__name__}"
|
|
390
|
+
raise RepeaterBookValidationError(msg)
|
|
315
391
|
|
|
316
392
|
if data.get("status") == "error":
|
|
317
|
-
raise
|
|
393
|
+
raise RepeaterBookAPIError(data.get("message", "Unknown API error"))
|
|
318
394
|
|
|
319
395
|
if "count" not in data or "results" not in data:
|
|
320
|
-
|
|
396
|
+
msg = "API response missing required 'count' or 'results' field"
|
|
397
|
+
raise RepeaterBookValidationError(msg)
|
|
321
398
|
|
|
322
399
|
data = cast("ExportJSON", data)
|
|
323
400
|
|
|
324
|
-
if data["count"] >= self.
|
|
401
|
+
if data["count"] >= self.max_count:
|
|
325
402
|
logger.warning(
|
|
326
403
|
"Reached max count for API response. Response may have been trimmed."
|
|
327
404
|
)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: repeaterbook
|
|
3
|
-
Version: 0.3.0
|
|
3
|
+
Version: 0.4.0
|
|
4
4
|
Summary: Python utility to work with data from RepeaterBook.
|
|
5
5
|
Project-URL: homepage, https://github.com/MicaelJarniac/repeaterbook
|
|
6
6
|
Project-URL: source, https://github.com/MicaelJarniac/repeaterbook
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
repeaterbook/__init__.py,sha256=GXcMvuiPupnrlC6YJPiPSSKWM8Kmx9ZWOqASXW-bh_0,519
|
|
2
|
+
repeaterbook/database.py,sha256=oAqCKGWjD4bQk62rsHTZQO0ESzn7UDrmNg78TZVznjY,1954
|
|
3
|
+
repeaterbook/exceptions.py,sha256=yUR_9Lj5PmYc5UcC1O_Om6OfNK4rk1tAiAlNDNA9LC8,1229
|
|
4
|
+
repeaterbook/models.py,sha256=lQjE2Vld4GCSNaXcpsP0v0-Zo-C9N9DpeJ6nV3FVVBw,7840
|
|
5
|
+
repeaterbook/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
|
+
repeaterbook/queries.py,sha256=1VgGNviH5NDCBjc4Q1rt3ORmbrzRsdL2_X_yxGc86DA,3100
|
|
7
|
+
repeaterbook/services.py,sha256=_nMGOiBKGtfkoW1fhEbZnWXZ5JeTVo1GOH7yrF7JMXs,15048
|
|
8
|
+
repeaterbook/utils.py,sha256=QI5aWci8b3GAIi7l3PfT51sg4TTlwzoHtamNKTGkaaI,1453
|
|
9
|
+
repeaterbook-0.4.0.dist-info/METADATA,sha256=VUJco99MK9a5WI_ba17n-KdkSRyN-COXAvyZzV9n9tY,7990
|
|
10
|
+
repeaterbook-0.4.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
11
|
+
repeaterbook-0.4.0.dist-info/licenses/LICENSE,sha256=TtbMt69RbQyifR_It2bTHKdlLR1Dj6x2A5y_oLOyoVk,1071
|
|
12
|
+
repeaterbook-0.4.0.dist-info/RECORD,,
|
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
repeaterbook/__init__.py,sha256=EGIRvLGcrzX5ZD8ZoU13XyL825rFS9zPNRS1RSQwilg,251
|
|
2
|
-
repeaterbook/database.py,sha256=oAqCKGWjD4bQk62rsHTZQO0ESzn7UDrmNg78TZVznjY,1954
|
|
3
|
-
repeaterbook/models.py,sha256=a3-_t6Q6h3kvbOvPiqwMFUN_ZgNMthtuUgjyXEvjv0w,6868
|
|
4
|
-
repeaterbook/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
-
repeaterbook/queries.py,sha256=1VgGNviH5NDCBjc4Q1rt3ORmbrzRsdL2_X_yxGc86DA,3100
|
|
6
|
-
repeaterbook/services.py,sha256=lb7KZxwM5pCA7tms4LTYh-I0XRG0sq-Uogd_3FCmS8U,11480
|
|
7
|
-
repeaterbook/utils.py,sha256=QI5aWci8b3GAIi7l3PfT51sg4TTlwzoHtamNKTGkaaI,1453
|
|
8
|
-
repeaterbook-0.3.0.dist-info/METADATA,sha256=L4hSRnqgWlWuebn2cnhuX47qZOnIFydmH46iPEqJ2LI,7990
|
|
9
|
-
repeaterbook-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
10
|
-
repeaterbook-0.3.0.dist-info/licenses/LICENSE,sha256=TtbMt69RbQyifR_It2bTHKdlLR1Dj6x2A5y_oLOyoVk,1071
|
|
11
|
-
repeaterbook-0.3.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|