api-graveyard 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,62 @@
1
+ import atexit
2
+ import signal
3
+ import logging
4
+ from typing import Optional
5
+
6
+ from .batcher import Batcher
7
+ from . import interceptor
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+ _batcher: Optional[Batcher] = None
12
+
13
+
14
def init(
    api_key: str,
    project_id: str,
    base_url: str = "https://api-graveyard.com",
    service_name: Optional[str] = None,
    environment: Optional[str] = None,
    flush_interval_s: float = 10.0,
    max_batch_size: int = 100,
    debug: bool = False,
) -> None:
    """Initialise the API Graveyard collector (idempotent).

    Creates the background batcher, patches outgoing-HTTP interception, and
    arranges a final flush on interpreter exit and on SIGTERM.

    Args:
        api_key: API key sent with every ingest request.
        project_id: Project to attribute captured events to.
        base_url: Ingest server base URL; override for self-hosted.
        service_name: Optional service tag added to every event.
        environment: Environment tag; defaults to "production".
        flush_interval_s: Seconds between background flushes.
        max_batch_size: Queue length that forces an immediate flush.
        debug: When True, log flush activity.
    """
    global _batcher

    # Idempotent: a second init() call keeps the first configuration.
    if _batcher is not None:
        return

    _batcher = Batcher(
        api_key=api_key,
        project_id=project_id,
        base_url=base_url,
        flush_interval_s=flush_interval_s,
        max_batch_size=max_batch_size,
        debug=debug,
    )

    options = {
        "base_url": base_url,
        "service_name": service_name,
        "environment": environment or "production",
    }
    interceptor.patch(_batcher, options)

    atexit.register(shutdown)

    # BUGFIX: the previous handler swallowed SIGTERM entirely — it called
    # shutdown() and returned, so the process no longer terminated on SIGTERM
    # and any handler the application had installed was clobbered. Chain the
    # previously-installed handler, or restore the default action and
    # re-raise, so SIGTERM still behaves as the application expects.
    previous = signal.getsignal(signal.SIGTERM)

    def _sigterm_handler(signum, frame):
        shutdown()
        if callable(previous):
            previous(signum, frame)
        elif previous == signal.SIG_DFL:
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            signal.raise_signal(signum)

    try:
        signal.signal(signal.SIGTERM, _sigterm_handler)
    except (OSError, ValueError):
        # ValueError: not the main thread; OSError: signals unavailable on
        # this platform. Skip signal integration in either case.
        pass
54
+
55
+
56
def shutdown() -> None:
    """Flush pending events and disable HTTP interception.

    Safe to call multiple times; subsequent calls are no-ops once the
    batcher has been torn down.
    """
    global _batcher

    interceptor.unpatch()
    if _batcher is None:
        return
    _batcher.shutdown()
    _batcher = None
@@ -0,0 +1,89 @@
1
+ import json
2
+ import logging
3
+ import threading
4
+ import urllib.request
5
+ from datetime import datetime, timezone
6
+ from typing import List, Optional
7
+
8
+ from .types import HttpEvent
9
+
10
+ logger = logging.getLogger(__name__)
11
+
12
+ _SELF_HOSTNAMES = {"api-graveyard.com", "www.api-graveyard.com"}
13
+
14
+
15
class Batcher:
    """Thread-safe in-memory event queue with periodic background flushing.

    Events accumulate in a queue; a daemon ``threading.Timer`` flushes them
    every ``flush_interval_s`` seconds, and ``push`` forces a flush once the
    queue reaches ``max_batch_size``. Delivery is best-effort: network
    failures are swallowed (logged only when ``debug`` is set).
    """

    def __init__(
        self,
        api_key: str,
        project_id: str,
        base_url: str = "https://api-graveyard.com",
        flush_interval_s: float = 10.0,
        max_batch_size: int = 100,
        debug: bool = False,
    ) -> None:
        self._api_key = api_key
        self._project_id = project_id
        self._endpoint = f"{base_url}/api/v1/projects/{project_id}/ingest/events/"
        self._max_batch_size = max_batch_size
        self._debug = debug
        self._queue: List[HttpEvent] = []
        self._lock = threading.Lock()
        self._shutdown = False
        self._timer: Optional[threading.Timer] = None
        # BUGFIX: set the interval BEFORE starting the timer. _timer_flush
        # reads self._flush_interval_s, and with a very short interval the
        # timer thread could fire before the attribute existed, raising
        # AttributeError in the background thread.
        self._flush_interval_s = flush_interval_s
        self._start_timer(flush_interval_s)

    def _start_timer(self, interval: float) -> None:
        # Daemon timer so a pending flush never blocks interpreter exit.
        self._timer = threading.Timer(interval, self._timer_flush)
        self._timer.daemon = True
        self._timer.start()

    def _timer_flush(self) -> None:
        # Timer callback: flush, then re-arm (Timer objects fire only once).
        if self._shutdown:
            return
        self.flush()
        self._start_timer(self._flush_interval_s)

    def push(self, event: HttpEvent) -> None:
        """Queue one event; flush immediately when the batch is full."""
        with self._lock:
            self._queue.append(event)
            should_flush = len(self._queue) >= self._max_batch_size

        # Flush outside the lock: _send does network I/O and push() runs on
        # the instrumented request path.
        if should_flush:
            self.flush()

    def flush(self) -> None:
        """Send all queued events now (no-op when the queue is empty)."""
        with self._lock:
            if not self._queue:
                return
            batch = self._queue[:]
            self._queue.clear()

        self._send(batch)

    def shutdown(self) -> None:
        """Stop the background timer and flush whatever is still queued."""
        self._shutdown = True
        if self._timer:
            self._timer.cancel()
        self.flush()

    def _send(self, events: List[HttpEvent]) -> None:
        # Best-effort POST of one batch; never raises into the caller.
        try:
            body = json.dumps(events).encode("utf-8")
            req = urllib.request.Request(
                self._endpoint,
                data=body,
                headers={
                    "Content-Type": "application/json",
                    "X-API-Key": self._api_key,
                    "User-Agent": "api-graveyard-python/0.1.0",
                },
                method="POST",
            )
            with urllib.request.urlopen(req, timeout=10) as resp:
                if self._debug:
                    # Lazy %-args: no formatting unless the record is emitted.
                    logger.debug("[api-graveyard] flushed %d events → %s", len(events), resp.status)
        except Exception as e:
            if self._debug:
                logger.debug("[api-graveyard] flush failed: %s", e)
@@ -0,0 +1,93 @@
1
+ import time
2
+ import logging
3
+ from datetime import datetime, timezone
4
+ from typing import Optional, Set
5
+ from urllib.parse import urlparse
6
+
7
+ logger = logging.getLogger(__name__)
8
+
9
+ _SELF_HOSTNAMES: Set[str] = {"api-graveyard.com", "www.api-graveyard.com"}
10
+ _IGNORED_HOSTNAMES: Set[str] = set()
11
+ _patched = False
12
+ _original_urlopen = None
13
+ _batcher = None
14
+ _service_name: Optional[str] = None
15
+ _environment: Optional[str] = None
16
+
17
+
18
def patch(batcher, options: dict) -> None:
    """Monkey-patch ``urllib3.HTTPConnectionPool.urlopen`` to record outgoing HTTP calls.

    Captured events (timestamp, method, URL, status, duration) are pushed to
    *batcher*. Requests to the collector's own hosts, the configured base_url
    host, and loopback addresses are ignored to avoid self-reporting loops.
    No-op if already patched or if urllib3 is not installed.

    Args:
        batcher: Object with a ``push(event)`` method (the Batcher).
        options: Dict with optional ``base_url``, ``service_name``,
            ``environment`` keys.
    """
    global _patched, _original_urlopen, _batcher, _service_name, _environment, _IGNORED_HOSTNAMES

    if _patched:
        return

    try:
        import urllib3
    except ImportError:
        logger.warning("[api-graveyard] urllib3 not found — HTTP interception disabled")
        return

    _batcher = batcher
    _service_name = options.get("service_name")
    _environment = options.get("environment", "production")

    # urlparse is already imported at module level — no local re-import needed.
    base_url = options.get("base_url", "https://api-graveyard.com")
    base_host = urlparse(base_url).hostname or "api-graveyard.com"
    _IGNORED_HOSTNAMES = _SELF_HOSTNAMES | {base_host, "localhost", "127.0.0.1", "::1"}

    _original_urlopen = urllib3.HTTPConnectionPool.urlopen

    def patched_urlopen(pool, method, url, **kwargs):
        # Early exit for ignored hosts: skip all instrumentation work
        # (URL construction included) for requests we will not record.
        if pool.host in _IGNORED_HOSTNAMES:
            return _original_urlopen(pool, method, url, **kwargs)

        scheme = "https" if isinstance(pool, urllib3.HTTPSConnectionPool) else "http"
        port = pool.port
        default_port = 443 if scheme == "https" else 80
        port_str = f":{port}" if port and port != default_port else ""
        full_url = f"{scheme}://{pool.host}{port_str}{url}"

        start = time.monotonic()
        status: Optional[int] = None
        # try/finally (the former bare `except: raise` was dead code): the
        # event is recorded on success AND on failure — failed requests are
        # reported without a status code.
        try:
            response = _original_urlopen(pool, method, url, **kwargs)
            status = response.status
            return response
        finally:
            event = {
                "ts": datetime.now(timezone.utc).isoformat(),
                "method": method.upper(),
                "url": full_url,
                "duration_ms": int((time.monotonic() - start) * 1000),
            }
            if status is not None:
                event["status"] = status
            if _service_name:
                event["service_name"] = _service_name
            if _environment:
                event["environment"] = _environment
            try:
                _batcher.push(event)
            except Exception:
                # Never let instrumentation break the caller's request.
                pass

    urllib3.HTTPConnectionPool.urlopen = patched_urlopen
    _patched = True
79
+
80
+
81
def unpatch() -> None:
    """Restore the original ``urllib3`` ``urlopen``, undoing ``patch()``.

    Safe to call repeatedly, and a no-op if ``patch()`` never succeeded.
    """
    global _patched

    if not (_patched and _original_urlopen is not None):
        return

    try:
        import urllib3
    except ImportError:
        pass
    else:
        urllib3.HTTPConnectionPool.urlopen = _original_urlopen

    _patched = False
api_graveyard/types.py ADDED
@@ -0,0 +1,11 @@
1
+ from typing import Optional, TypedDict
2
+
3
+
4
# Shape of a single captured HTTP request event as sent to the ingest API.
# ``total=False``: every key is optional — the interceptor includes only the
# fields it could determine for a given request.
HttpEvent = TypedDict(
    "HttpEvent",
    {
        "ts": str,                      # request timestamp (ISO-8601)
        "method": str,                  # HTTP verb
        "url": str,                     # full request URL
        "status": Optional[int],        # response status; absent on failure
        "duration_ms": Optional[int],   # request duration in milliseconds
        "service_name": Optional[str],  # optional service tag
        "environment": Optional[str],   # optional environment tag
    },
    total=False,
)
@@ -0,0 +1,114 @@
1
+ Metadata-Version: 2.4
2
+ Name: api-graveyard
3
+ Version: 0.1.0
4
+ Summary: Official Python collector for API Graveyard — automatically tracks your outgoing HTTP dependencies
5
+ License: MIT
6
+ Project-URL: Homepage, https://api-graveyard.com
7
+ Project-URL: Repository, https://github.com/Shakargy/api-graveyard-python
8
+ Keywords: api,monitoring,dependency,tracking,http,observability
9
+ Classifier: Development Status :: 4 - Beta
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Topic :: Software Development :: Libraries
19
+ Requires-Python: >=3.8
20
+ Description-Content-Type: text/markdown
21
+ Requires-Dist: urllib3>=1.26
22
+ Provides-Extra: dev
23
+ Requires-Dist: pytest>=7; extra == "dev"
24
+ Requires-Dist: urllib3>=2; extra == "dev"
25
+
26
+ # api-graveyard
27
+
28
+ Official Python collector for [API Graveyard](https://api-graveyard.com) — automatically tracks your outgoing HTTP dependencies, detects risk events, and surfaces zombie APIs before they take down your product.
29
+
30
+ ## Install
31
+
32
+ ```bash
33
+ pip install api-graveyard
34
+ ```
35
+
36
+ ## Quick start
37
+
38
+ Add one call to the top of your app entry point:
39
+
40
+ ```python
41
+ import api_graveyard
42
+
43
+ api_graveyard.init(
44
+ api_key="agk_your_key_here",
45
+ project_id="your-project-id",
46
+ )
47
+
48
+ # ... rest of your app
49
+ ```
50
+
51
+ That's it. Every outgoing HTTP/HTTPS request your app makes is now automatically captured, batched, and sent to API Graveyard in the background.
52
+
53
+ ## Works with
54
+
55
+ - `requests`
56
+ - `httpx` — only when routed through a urllib3-based transport (httpx's default transport does not use urllib3 and is not captured)
57
+ - `urllib3` directly
58
+ - Any library that uses `urllib3` under the hood
59
+
60
+ ## Options
61
+
62
+ | Option | Type | Default | Description |
63
+ |--------|------|---------|-------------|
64
+ | `api_key` | `str` | **required** | Your `agk_...` API key from the dashboard |
65
+ | `project_id` | `str` | **required** | Your project ID from the dashboard |
66
+ | `base_url` | `str` | `https://api-graveyard.com` | Override for self-hosted |
67
+ | `service_name` | `str` | `None` | Tag events with a service name |
68
+ | `environment` | `str` | `"production"` | Tag events with an environment |
69
+ | `flush_interval_s` | `float` | `10.0` | How often to flush the event buffer (seconds) |
70
+ | `max_batch_size` | `int` | `100` | Max events per batch before forcing a flush |
71
+ | `debug` | `bool` | `False` | Log flush activity |
72
+
73
+ ## Django / Flask example
74
+
75
+ ```python
76
+ # manage.py or wsgi.py / app factory
77
+ import api_graveyard
78
+
79
+ api_graveyard.init(
80
+ api_key=os.environ["API_GRAVEYARD_KEY"],
81
+ project_id=os.environ["API_GRAVEYARD_PROJECT_ID"],
82
+ service_name="django-backend",
83
+ environment=os.environ.get("DJANGO_ENV", "production"),
84
+ )
85
+ ```
86
+
87
+ ## Graceful shutdown
88
+
89
+ The collector automatically flushes on process exit. For manual control:
90
+
91
+ ```python
92
+ import api_graveyard
93
+
94
+ api_graveyard.shutdown()
95
+ ```
96
+
97
+ ## What gets captured
98
+
99
+ - Method, URL, HTTP status code, response time
100
+ - Timestamp of each request
101
+ - Service name and environment (if configured)
102
+
103
+ Requests to `api-graveyard.com` itself are never captured to avoid infinite loops.
104
+ Loopback requests (`localhost`, `127.0.0.1`, and IPv6 `::1`) are ignored by default.
105
+
106
+ ## Get your API key
107
+
108
+ 1. Sign up at [api-graveyard.com](https://api-graveyard.com)
109
+ 2. Create a project
110
+ 3. Go to **Settings → Integrations** and create an API key
111
+
112
+ ## License
113
+
114
+ MIT
@@ -0,0 +1,8 @@
1
+ api_graveyard/__init__.py,sha256=YDVxmCaYvjnQeazkGTYr9V14NdDsrukkBmRR534ASUc,1308
2
+ api_graveyard/batcher.py,sha256=mwWSyzt62WjtYrvDwySQeq8oNDApfJ-4ajWyBqYE3aw,2744
3
+ api_graveyard/interceptor.py,sha256=TWS6TWnzm-nW0QdbBKt_CiU1pfmJ01PINcqmoA9akpQ,2887
4
+ api_graveyard/types.py,sha256=_OzrA7XbQXXTHRqcKcnEfIMyIiTedDsO9r_6-yz7mOo,243
5
+ api_graveyard-0.1.0.dist-info/METADATA,sha256=hwS-U7tLc78dqKGxV5T87QQHuQgrdWVWw_9ND9WFST4,3586
6
+ api_graveyard-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
7
+ api_graveyard-0.1.0.dist-info/top_level.txt,sha256=-FfW1IBmQxzShMo6b09NB7C7TBbFjjz3MOfkjauDazU,14
8
+ api_graveyard-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1 @@
1
+ api_graveyard