django-health-check 4.0rc2__tar.gz → 4.0rc4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/PKG-INFO +4 -1
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/_version.py +3 -3
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/checks.py +16 -6
- django_health_check-4.0rc4/health_check/contrib/atlassian.py +167 -0
- django_health_check-4.0rc4/health_check/contrib/rss.py +226 -0
- django_health_check-4.0rc4/health_check/management/commands/health_check.py +124 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/templates/health_check/index.html +1 -1
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/pyproject.toml +2 -1
- django_health_check-4.0rc2/health_check/contrib/rss.py +0 -113
- django_health_check-4.0rc2/health_check/management/commands/health_check.py +0 -46
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/LICENSE +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/README.md +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/__init__.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/base.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/contrib/__init__.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/contrib/celery.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/contrib/kafka.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/contrib/rabbitmq.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/contrib/redis.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/exceptions.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/management/__init__.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/management/commands/__init__.py +0 -0
- {django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/views.py +0 -0
{django_health_check-4.0rc2 → django_health_check-4.0rc4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: django-health-check
-Version: 4.0rc2
+Version: 4.0rc4
 Summary: Monitor the health of your Django app and its connected services.
 Keywords: django,postgresql
 Author-email: Kristian Ollegaard <kristian@oellegaard.com>, Johannes Maron <johannes@maron.family>

@@ -29,17 +29,20 @@ License-File: LICENSE
 Requires-Dist: Django>=5.2
 Requires-Dist: dnspython>=2.0.0
 Requires-Dist: psutil
+Requires-Dist: httpx>=0.27.0 ; extra == "atlassian"
 Requires-Dist: celery>=5.0.0 ; extra == "celery"
 Requires-Dist: confluent-kafka>=2.0.0 ; extra == "kafka"
 Requires-Dist: aio-pika>=9.0.0 ; extra == "rabbitmq"
 Requires-Dist: redis>=4.2.0 ; extra == "redis"
 Requires-Dist: httpx>=0.27.0 ; extra == "rss"
+Requires-Dist: feedparser>=6.0.0 ; extra == "rss"
 Project-URL: Changelog, https://github.com/codingjoe/django-health-check/releases
 Project-URL: Documentation, https://codingjoe.dev/django-health-check/
 Project-URL: Homepage, https://codingjoe.dev/django-health-check/
 Project-URL: Issues, https://github.com/codingjoe/django-health-check/issues
 Project-URL: Releasenotes, https://github.com/codingjoe/django-health-check/releases/latest
 Project-URL: Source, https://github.com/codingjoe/django-health-check
+Provides-Extra: atlassian
 Provides-Extra: celery
 Provides-Extra: kafka
 Provides-Extra: rabbitmq
{django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/_version.py

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '4.0rc2'
-__version_tuple__ = version_tuple = (4, 0, 'rc2')
+__version__ = version = '4.0rc4'
+__version_tuple__ = version_tuple = (4, 0, 'rc4')
 
-__commit_id__ = commit_id = '
+__commit_id__ = commit_id = 'g4b2293052'
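The version attributes above are re-exported by the package (the new contrib modules below import __version__ for their User-Agent header), so the bump is visible at runtime. A minimal sketch, assuming the 4.0rc4 distribution is installed:

    import health_check

    # Prints the bumped release shown in this diff, e.g. '4.0rc4'.
    print(health_check.__version__)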
{django_health_check-4.0rc2 → django_health_check-4.0rc4}/health_check/checks.py

@@ -54,20 +54,30 @@ class Cache(HealthCheck):
 
     Args:
         alias: The cache alias to test against.
-
+        key_prefix: Prefix for the node specific cache key.
+        timeout: Time until probe keys expire in the cache backend.
 
     """
 
     alias: str = "default"
-
+    key_prefix: str = dataclasses.field(default="djangohealthcheck_test", repr=False)
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=5), repr=False
+    )
 
     async def run(self):
         cache = caches[self.alias]
-
+        # Use an isolated key per probe run to avoid cross-process write races.
+        cache_key = f"{self.key_prefix}:{uuid.uuid4().hex}"
+        cache_value = f"itworks-{datetime.datetime.now().timestamp()}"
         try:
-            await cache.aset(
-
-
+            await cache.aset(
+                cache_key,
+                cache_value,
+                timeout=self.timeout.total_seconds(),
+            )
+            if not await cache.aget(cache_key) == cache_value:
+                raise ServiceUnavailable(f"Cache key {cache_key} does not match")
         except CacheKeyWarning as e:
             raise ServiceReturnedUnexpectedResult("Cache key warning") from e
         except ValueError as e:
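The Cache check's new key_prefix and timeout knobs are ordinary dataclass fields, so they can be tuned per instance. A minimal sketch of probing the default cache backend directly, assuming Django settings are already configured (for example inside ./manage.py shell); the alias and timeout values here are illustrative:

    import asyncio
    import datetime

    from health_check.checks import Cache
    from health_check.exceptions import ServiceUnavailable

    async def probe_cache():
        # Shorter probe-key expiry than the 5-second default shown in the diff above.
        check = Cache(alias="default", timeout=datetime.timedelta(seconds=2))
        try:
            await check.run()
        except ServiceUnavailable as error:
            print(f"cache unhealthy: {error}")
        else:
            print("cache healthy")

    asyncio.run(probe_cache())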
django_health_check-4.0rc4/health_check/contrib/atlassian.py (new file)

@@ -0,0 +1,167 @@
+"""Atlassian Status Page API health checks for cloud provider status pages."""
+
+import dataclasses
+import datetime
+import logging
+import typing
+
+import httpx
+
+from health_check import HealthCheck, __version__
+from health_check.exceptions import ServiceUnavailable, ServiceWarning
+
+logger = logging.getLogger(__name__)
+
+
+class AtlassianStatusPage(HealthCheck):
+    """
+    Base class for Atlassian status page health checks.
+
+    Monitor cloud provider service health via Atlassian Status Page API v2.
+
+    Each subclass should define the `base_url` for the specific status page
+    and appropriate `timeout` value. The `max_age` parameter is not used
+    since the API endpoint only returns currently unresolved incidents.
+
+    Examples:
+        >>> class FlyIo(AtlassianStatusPage):
+        ...     timeout = datetime.timedelta(seconds=10)
+        ...     base_url = "https://status.flyio.net"
+
+    """
+
+    base_url: typing.ClassVar[str] = NotImplemented
+    timeout: datetime.timedelta = NotImplemented
+
+    async def run(self):
+        if msg := "\n".join([i async for i in self._fetch_incidents()]):
+            raise ServiceWarning(msg)
+        logger.debug("No recent incidents found")
+
+    async def _fetch_incidents(self):
+        api_url = f"{self.base_url}/api/v2/incidents/unresolved.json"
+        logger.debug("Fetching incidents from %r", api_url)
+
+        async with httpx.AsyncClient() as client:
+            try:
+                response = await client.get(
+                    api_url,
+                    headers={"User-Agent": f"django-health-check@{__version__}"},
+                    timeout=self.timeout.total_seconds(),
+                    follow_redirects=True,
+                )
+            except httpx.TimeoutException as e:
+                raise ServiceUnavailable("API request timed out") from e
+            except httpx.RequestError as e:
+                raise ServiceUnavailable(f"Failed to fetch API: {e}") from e
+
+        try:
+            response.raise_for_status()
+        except httpx.HTTPStatusError as e:
+            raise ServiceUnavailable(
+                f"HTTP error {e.response.status_code} fetching API from {api_url!r}"
+            ) from e
+
+        try:
+            data = response.json()
+        except ValueError as e:
+            raise ServiceUnavailable("Failed to parse JSON response") from e
+
+        for incident in data["incidents"]:
+            yield f"{incident['name']}: {incident['shortlink']}"
+
+
+@dataclasses.dataclass
+class Cloudflare(AtlassianStatusPage):
+    """
+    Check Cloudflare platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://www.cloudflarestatus.com"
+
+
+@dataclasses.dataclass
+class FlyIo(AtlassianStatusPage):
+    """
+    Check Fly.io platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://status.flyio.net"
+
+
+@dataclasses.dataclass
+class PlatformSh(AtlassianStatusPage):
+    """
+    Check Platform.sh platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://status.platform.sh"
+
+
+@dataclasses.dataclass
+class DigitalOcean(AtlassianStatusPage):
+    """
+    Check DigitalOcean platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://status.digitalocean.com"
+
+
+@dataclasses.dataclass
+class Render(AtlassianStatusPage):
+    """
+    Check Render platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://status.render.com"
+
+
+@dataclasses.dataclass
+class Vercel(AtlassianStatusPage):
+    """
+    Check Vercel platform status via Atlassian Status Page API v2.
+
+    Args:
+        timeout: Request timeout duration.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    base_url: typing.ClassVar[str] = "https://www.vercel-status.com"
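The provider subclasses above differ only in their base_url, so covering another Atlassian-hosted status page is a short subclass mirroring the FlyIo doctest in the base class; any unresolved incidents are raised as a single ServiceWarning listing each incident's name and shortlink. A sketch for a hypothetical provider (the URL is a placeholder, not a real status page):

    import dataclasses
    import datetime
    import typing

    from health_check.contrib.atlassian import AtlassianStatusPage

    @dataclasses.dataclass
    class ExampleCloud(AtlassianStatusPage):
        """Check a hypothetical Atlassian-hosted status page (placeholder URL)."""

        timeout: datetime.timedelta = dataclasses.field(
            default=datetime.timedelta(seconds=10), repr=False
        )
        # The base class appends /api/v2/incidents/unresolved.json to this URL.
        base_url: typing.ClassVar[str] = "https://status.example-cloud.invalid"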
django_health_check-4.0rc4/health_check/contrib/rss.py (new file)

@@ -0,0 +1,226 @@
+"""RSS feed health checks for cloud provider status pages."""
+
+import dataclasses
+import datetime
+import logging
+import typing
+
+import feedparser
+import httpx
+
+from health_check import HealthCheck, __version__
+from health_check.exceptions import ServiceUnavailable, ServiceWarning
+
+logger = logging.getLogger(__name__)
+
+
+class Feed(HealthCheck):
+    """
+    Base class for cloud provider status feed health checks.
+
+    Monitor cloud provider service health via their public RSS or Atom status feeds.
+
+    Subclasses must provide:
+
+    Attributes:
+        feed_url: The full URL of the RSS or Atom feed to monitor.
+        timeout: Maximum duration to wait for the HTTP request before failing.
+        max_age: Maximum age for an incident entry to be considered active.
+
+    The `timeout` and `max_age` values are used to control how long the
+    health check waits for the feed and how far back in time incidents are
+    considered relevant. The `feed_url` is used to fetch the status feed.
+
+    Examples:
+        >>> import dataclasses
+        >>> import datetime
+        >>> import typing
+        >>> from health_check.contrib.rss import Feed
+        >>> @dataclasses.dataclass
+        ... class MyProviderStatus(Feed):
+        ...     \"\"\"Check MyProvider status via its public RSS feed.\"\"\"
+        ...     timeout: datetime.timedelta = dataclasses.field(
+        ...         default=datetime.timedelta(seconds=10),
+        ...         repr=False,
+        ...     )
+        ...     max_age: datetime.timedelta = dataclasses.field(
+        ...         default=datetime.timedelta(hours=4),
+        ...         repr=False,
+        ...     )
+        ...     feed_url: typing.ClassVar[str] = "https://status.myprovider.com/feed"
+
+    """
+
+    feed_url: typing.ClassVar[str] = NotImplemented
+    timeout: datetime.timedelta = NotImplemented
+    max_age: datetime.timedelta = NotImplemented
+
+    async def run(self):
+        logger.debug("Fetching feed from %s", self.feed_url)
+
+        async with httpx.AsyncClient() as client:
+            try:
+                response = await client.get(
+                    self.feed_url,
+                    headers={"User-Agent": f"django-health-check@{__version__}"},
+                    timeout=self.timeout.total_seconds(),
+                    follow_redirects=True,
+                )
+            except httpx.TimeoutException as e:
+                raise ServiceUnavailable("Feed request timed out") from e
+            except httpx.RequestError as e:
+                raise ServiceUnavailable(f"Failed to fetch feed: {e}") from e
+
+        try:
+            response.raise_for_status()
+        except httpx.HTTPStatusError as e:
+            raise ServiceUnavailable(
+                f"HTTP error {e.response.status_code} fetching feed from {self.feed_url!r}"
+            ) from e
+
+        content = response.text
+
+        feed = feedparser.parse(content)
+
+        if feed.bozo:
+            # feedparser sets bozo=1 for malformed feeds
+            logger.warning("Feed parsing encountered errors: %s", feed.bozo_exception)
+
+        if not feed.entries:
+            logger.debug("No entries found in feed")
+            return
+
+        incidents = [entry for entry in feed.entries if self._is_recent_incident(entry)]
+
+        if incidents:
+            raise ServiceWarning(
+                f"Found {len(incidents)} recent incident(s): {
+                    ', '.join(
+                        getattr(entry, 'title', 'Untitled incident')
+                        or 'Untitled incident'
+                        for entry in incidents
+                    )
+                }"
+            )
+
+        logger.debug("No recent incidents found in feed")
+
+    def _is_recent_incident(self, entry):
+        """Check if entry is a recent incident."""
+        published_at = self._extract_date(entry)
+        if not published_at:
+            return True
+
+        now = datetime.datetime.now(tz=datetime.timezone.utc)
+        cutoff = now - self.max_age
+        return now >= published_at > cutoff
+
+    def _extract_date(self, entry):
+        # feedparser normalizes both RSS and Atom dates to struct_time
+        # Try published first, then updated
+        for date_field in ["published_parsed", "updated_parsed"]:
+            if date_tuple := getattr(entry, date_field, None):
+                try:
+                    # Convert struct_time to datetime
+                    return datetime.datetime(
+                        *date_tuple[:6], tzinfo=datetime.timezone.utc
+                    )
+                except (ValueError, TypeError):
+                    logger.warning(
+                        "Failed to parse date from entry %r for %r",
+                        date_tuple,
+                        self.feed_url,
+                        exc_info=True,
+                    )
+        return None
+
+
+@dataclasses.dataclass
+class AWS(Feed):
+    """
+    Check AWS service status via their public RSS status feeds.
+
+    Args:
+        region: AWS region code (e.g., 'us-east-1', 'eu-west-1').
+        service: AWS service name (e.g., 'ec2', 's3', 'rds').
+        timeout: Request timeout duration.
+        max_age: Maximum age for an incident to be considered active.
+
+    """
+
+    region: str
+    service: str
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    max_age: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(hours=8), repr=False
+    )
+
+    def __post_init__(self):
+        self.feed_url: str = (
+            f"https://status.aws.amazon.com/rss/{self.service}-{self.region}.rss"
+        )
+
+
+@dataclasses.dataclass
+class Heroku(Feed):
+    """
+    Check Heroku platform status via their public RSS status feed.
+
+    Args:
+        timeout: Request timeout duration.
+        max_age: Maximum age for an incident to be considered active.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    max_age: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(hours=8), repr=False
+    )
+    feed_url: typing.ClassVar[str] = "https://status.heroku.com/feed"
+
+
+@dataclasses.dataclass
+class Azure(Feed):
+    """
+    Check Azure platform status via their public RSS status feed.
+
+    Args:
+        timeout: Request timeout duration.
+        max_age: Maximum age for an incident to be considered active.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    max_age: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(hours=8), repr=False
+    )
+    feed_url: typing.ClassVar[str] = (
+        "https://rssfeed.azure.status.microsoft/en-us/status/feed/"
+    )
+
+
+@dataclasses.dataclass
+class GoogleCloud(Feed):
+    """
+    Check Google Cloud platform status via their public Atom status feed.
+
+    Args:
+        timeout: Request timeout duration.
+        max_age: Maximum age for an incident to be considered active.
+
+    """
+
+    timeout: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(seconds=10), repr=False
+    )
+    max_age: datetime.timedelta = dataclasses.field(
+        default=datetime.timedelta(hours=8), repr=False
+    )
+
+    feed_url: typing.ClassVar[str] = "https://status.cloud.google.com/en/feed.atom"
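The concrete feed checks are plain dataclasses; AWS is the only one with required arguments, since its feed URL is built per region and service in __post_init__. A sketch of awaiting two of them directly (the region and service values are the docstring's own examples; running this needs the "rss" extra installed and network access, and ServiceWarning signals recent incidents while ServiceUnavailable signals a fetch or parse failure):

    import asyncio

    from health_check.contrib.rss import AWS, Heroku
    from health_check.exceptions import ServiceUnavailable, ServiceWarning

    async def probe_feeds():
        # AWS(region=..., service=...) resolves to
        # https://status.aws.amazon.com/rss/ec2-us-east-1.rss for these values.
        for check in (AWS(region="us-east-1", service="ec2"), Heroku()):
            try:
                await check.run()
            except ServiceWarning as warning:
                print(f"{check!r}: recent incidents: {warning}")
            except ServiceUnavailable as error:
                print(f"{check!r}: feed not available: {error}")
            else:
                print(f"{check!r}: no recent incidents")

    asyncio.run(probe_feeds())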
django_health_check-4.0rc4/health_check/management/commands/health_check.py (new file)

@@ -0,0 +1,124 @@
+import os
+import sys
+import urllib.error
+import urllib.request
+
+from django.conf import settings
+from django.core.management.base import BaseCommand
+from django.urls import NoReverseMatch, reverse
+
+
+class Command(BaseCommand):
+    help = "Run health checks and exit 0 if everything went well."
+
+    @property
+    def default_forwarded_host(self):
+        return (
+            settings.ALLOWED_HOSTS[0].strip(".")
+            if settings.ALLOWED_HOSTS and settings.ALLOWED_HOSTS[0] != "*"
+            else None
+        )
+
+    @property
+    def default_addrport(self):
+        return ":".join([os.getenv("HOST", "127.0.0.1"), os.getenv("PORT", "8000")])
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "endpoint",
+            type=str,
+            help="URL-pattern name of health check endpoint to test",
+        )
+        parser.add_argument(
+            "addrport",
+            nargs="?",
+            type=str,
+            default=self.default_addrport,
+            help=f"Optional port number, or ipaddr:port (default: {self.default_addrport})",
+        )
+        parser.add_argument(
+            "--forwarded-host",
+            type=str,
+            default=self.default_forwarded_host,
+            help=f"Value for X-Forwarded-Host header (default: {self.default_forwarded_host})",
+        )
+        parser.add_argument(
+            "--forwarded-proto",
+            type=str,
+            choices=["http", "https"],
+            default="https",
+            help="Value for X-Forwarded-Proto header (default: https)",
+        )
+        parser.add_argument(
+            "--timeout",
+            type=int,
+            default=5,
+            help="Timeout in seconds for the health check request (default: 5 seconds)",
+        )
+
+    def handle(self, *args, **options):
+        endpoint = options.get("endpoint")
+        try:
+            path = reverse(endpoint)
+        except NoReverseMatch as e:
+            self.stderr.write(
+                f"Could not resolve endpoint {endpoint!r}: {e}\n"
+                "Please provide a valid URL pattern name for the health check endpoint."
+            )
+            sys.exit(2)
+        addrport = options.get("addrport")
+        # Use HTTPS only when SSL redirect is enabled without forwarded headers (direct HTTPS required).
+        # Otherwise use HTTP (typical for containers with X-Forwarded-Proto header support).
+        proto = (
+            "https"
+            if settings.SECURE_SSL_REDIRECT and not settings.USE_X_FORWARDED_HOST
+            else "http"
+        )
+        url = f"{proto}://{addrport}{path}"
+
+        headers = {"Accept": "text/plain"}
+
+        # Add X-Forwarded-Host header
+        if forwarded_host := options.get("forwarded_host"):
+            headers["X-Forwarded-Host"] = forwarded_host
+
+        # Add X-Forwarded-Proto header
+        if forwarded_proto := options.get("forwarded_proto"):
+            headers["X-Forwarded-Proto"] = forwarded_proto
+
+        if options.get("verbosity", 1) >= 2:
+            self.stdout.write(
+                f"Checking health endpoint at {url!r} with headers: {headers}"
+            )
+
+        request = urllib.request.Request(url, headers=headers)  # noqa: S310
+        try:
+            response = urllib.request.urlopen(request, timeout=options["timeout"])  # noqa: S310
+        except urllib.error.HTTPError as e:
+            match e.code:
+                case 500:  # Health check failed
+                    self.stdout.write(e.read().decode("utf-8"))
+                    sys.exit(1)
+                case 400:
+                    self.stderr.write(
+                        f"{url!r} is not reachable: {e.reason}\nPlease check your ALLOWED_HOSTS setting or use the --forwarded-host option."
+                    )
+                    sys.exit(2)
+                case _:
+                    self.stderr.write(
+                        "Unexpected HTTP error "
+                        f"when trying to reach {url!r}: {e}\n"
+                        f"You may have selected an invalid endpoint {endpoint!r}"
+                        f" or another application is running on {addrport!r}."
+                    )
+                    sys.exit(2)
+        except urllib.error.URLError as e:
+            self.stderr.write(
+                f"{url!r} is not reachable: {e.reason}\nPlease check your server is running and reachable."
+            )
+            sys.exit(2)
+        except TimeoutError as e:
+            self.stderr.write(f"Timeout when trying to reach {url!r}: {e}")
+            sys.exit(2)
+        else:
+            self.stdout.write(response.read().decode("utf-8"))
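Compared to the removed rc2 command further down, the new one resolves the endpoint by URL-pattern name with a proper error message, derives the scheme from SECURE_SSL_REDIRECT and USE_X_FORWARDED_HOST, sends X-Forwarded-Host/X-Forwarded-Proto headers, and distinguishes exit codes (1 for failed checks, 2 for configuration or connectivity problems). It is normally invoked as python manage.py health_check <url-name>, for example from a container HEALTHCHECK; a sketch of driving it from Python inside an already configured Django process, where the URL-pattern name "healthcheck" is an assumption:

    from django.core.management import call_command

    try:
        call_command(
            "health_check",
            "healthcheck",                 # assumed URL-pattern name of your health endpoint
            "127.0.0.1:8000",              # optional addrport; defaults to $HOST:$PORT
            forwarded_host="example.com",  # sent as X-Forwarded-Host
            forwarded_proto="https",       # sent as X-Forwarded-Proto
            timeout=5,
        )
    except SystemExit as status:
        # The command calls sys.exit(1) when the endpoint reports failed checks
        # (HTTP 500) and sys.exit(2) for unreachable or misconfigured endpoints.
        print(f"health check failed with exit code {status.code}")
    else:
        print("health check passed")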
{django_health_check-4.0rc2 → django_health_check-4.0rc4}/pyproject.toml

@@ -48,7 +48,8 @@ celery = ["celery>=5.0.0"]
 kafka = ["confluent-kafka>=2.0.0"]
 rabbitmq = ["aio-pika>=9.0.0"]
 redis = ["redis>=4.2.0"]
-rss = ["httpx>=0.27.0"]
+rss = ["httpx>=0.27.0", "feedparser>=6.0.0"]
+atlassian = ["httpx>=0.27.0"]
 
 [project.urls]
 # https://packaging.python.org/en/latest/specifications/well-known-project-urls/#well-known-labels
django_health_check-4.0rc2/health_check/contrib/rss.py (removed)

@@ -1,113 +0,0 @@
-"""RSS feed health checks for cloud provider status pages."""
-
-import dataclasses
-import datetime
-import email.utils
-import logging
-from xml.etree import ElementTree
-
-import httpx
-
-from health_check.base import HealthCheck
-from health_check.exceptions import ServiceUnavailable, ServiceWarning
-
-logger = logging.getLogger(__name__)
-
-
-@dataclasses.dataclass
-class AWS(HealthCheck):
-    """
-    Check AWS service status via their public RSS status feeds.
-
-    Args:
-        region: AWS region code (e.g., 'us-east-1', 'eu-west-1').
-        service: AWS service name (e.g., 'ec2', 's3', 'rds').
-        timeout: Request timeout duration.
-        max_age: Maximum age for an incident to be considered active.
-
-    """
-
-    region: str
-    service: str
-    timeout: datetime.timedelta = dataclasses.field(
-        default=datetime.timedelta(seconds=10), repr=False
-    )
-    max_age: datetime.timedelta = dataclasses.field(
-        default=datetime.timedelta(days=1), repr=False
-    )
-
-    def __post_init__(self):
-        self.feed_url: str = (
-            f"https://status.aws.amazon.com/rss/{self.service}-{self.region}.rss"
-        )
-
-    async def run(self):
-        """Check the RSS feed for incidents."""
-        logger.debug("Fetching feed from %s", self.feed_url)
-
-        async with httpx.AsyncClient() as client:
-            try:
-                response = await client.get(
-                    self.feed_url,
-                    headers={"User-Agent": "django-health-check"},
-                    timeout=self.timeout.total_seconds(),
-                    follow_redirects=True,
-                )
-            except httpx.TimeoutException as e:
-                raise ServiceUnavailable("RSS feed request timed out") from e
-            except httpx.RequestError as e:
-                raise ServiceUnavailable(f"Failed to fetch RSS feed: {e}") from e
-
-        try:
-            response.raise_for_status()
-        except httpx.HTTPStatusError as e:
-            raise ServiceUnavailable(
-                f"HTTP error {e.response.status_code} fetching RSS feed"
-            ) from e
-
-        content = response.text
-
-        try:
-            root = ElementTree.fromstring(content)  # noqa: S314
-        except ElementTree.ParseError as e:
-            raise ServiceUnavailable("Failed to parse RSS feed") from e
-
-        entries = self._extract_entries(root)
-        incidents = [entry for entry in entries if self._is_recent_incident(entry)]
-
-        if incidents:
-            incident_titles = [self._extract_title(entry) for entry in incidents]
-            raise ServiceWarning(
-                f"Found {len(incidents)} recent incident(s): {', '.join(incident_titles)}"
-            )
-
-        logger.debug("No recent incidents found in RSS feed")
-
-    def _extract_entries(self, root):
-        """Extract entries from RSS 2.0 feed."""
-        return root.findall(".//item")
-
-    def _is_recent_incident(self, entry):
-        """Check if entry is a recent incident."""
-        published_at = self._extract_date(entry)
-        if not published_at:
-            return True
-
-        cutoff = datetime.datetime.now(tz=datetime.timezone.utc) - self.max_age
-        return published_at > cutoff
-
-    def _extract_date(self, entry):
-        """Extract publication date from RSS entry."""
-        pub_date = entry.find("pubDate")
-        if pub_date is not None and (date_text := pub_date.text):
-            try:
-                return email.utils.parsedate_to_datetime(date_text)
-            except (ValueError, TypeError):
-                pass
-
-    def _extract_title(self, entry):
-        """Extract title from RSS entry."""
-        if (title := entry.find("title")) is not None:
-            return title.text or "Untitled incident"
-
-        return "Untitled incident"
django_health_check-4.0rc2/health_check/management/commands/health_check.py (removed)

@@ -1,46 +0,0 @@
-import sys
-import urllib.error
-import urllib.request
-
-from django.core.management.base import BaseCommand
-from django.urls import reverse
-
-
-class Command(BaseCommand):
-    help = "Run health checks and exit 0 if everything went well."
-
-    def add_arguments(self, parser):
-        parser.add_argument(
-            "endpoint",
-            type=str,
-            help="URL-pattern name of health check endpoint to test",
-        )
-        parser.add_argument(
-            "addrport",
-            nargs="?",
-            type=str,
-            help="Optional port number, or ipaddr:port (default: localhost:8000)",
-            default="localhost:8000",
-        )
-
-    def handle(self, *args, **options):
-        endpoint = options.get("endpoint")
-        path = reverse(endpoint)
-        host, sep, port = options.get("addrport").partition(":")
-        url = f"http://{host}:{port}{path}" if sep else f"http://{host}{path}"
-        request = urllib.request.Request(  # noqa: S310
-            url, headers={"Accept": "text/plain"}
-        )
-        try:
-            response = urllib.request.urlopen(request)  # noqa: S310
-        except urllib.error.HTTPError as e:
-            # 500 status codes will raise HTTPError
-            self.stdout.write(e.read().decode("utf-8"))
-            sys.exit(1)
-        except urllib.error.URLError as e:
-            self.stderr.write(
-                f'"{url}" is not reachable: {e.reason}\nPlease check your ALLOWED_HOSTS setting.'
-            )
-            sys.exit(2)
-        else:
-            self.stdout.write(response.read().decode("utf-8"))
All other files listed above are renamed from the 4.0rc2 prefix to the 4.0rc4 prefix without content changes.