devlogs 2.2.9__tar.gz → 2.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {devlogs-2.2.9/src/devlogs.egg-info → devlogs-2.3.0}/PKG-INFO +1 -1
- {devlogs-2.2.9 → devlogs-2.3.0}/pyproject.toml +1 -1
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/_version_static.py +1 -1
- devlogs-2.3.0/src/devlogs/collector/loki_plugin.py +182 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/server.py +7 -0
- devlogs-2.3.0/src/devlogs/loki/__init__.py +1 -0
- devlogs-2.3.0/src/devlogs/loki/queries.py +332 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/mcp/server.py +142 -0
- {devlogs-2.2.9 → devlogs-2.3.0/src/devlogs.egg-info}/PKG-INFO +1 -1
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs.egg-info/SOURCES.txt +3 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_build_info.py +20 -10
- {devlogs-2.2.9 → devlogs-2.3.0}/LICENSE +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/MANIFEST.in +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/README.md +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/setup.cfg +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/__main__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/build_info.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/cli.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/auth.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/cli.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/errors.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/forwarder.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/ingestor.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/plugins.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/collector/schema.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/config.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/context.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/demo.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/devlogs_client.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/formatting.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/handler.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/jenkins/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/jenkins/cli.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/jenkins/core.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/levels.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/mcp/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/opensearch/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/opensearch/client.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/opensearch/indexing.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/opensearch/mappings.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/opensearch/queries.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/retention.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/scrub.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/time_utils.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/version.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/web/__init__.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/web/server.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/web/static/devlogs.css +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/web/static/devlogs.js +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/web/static/index.html +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs/wrapper.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs.egg-info/dependency_links.txt +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs.egg-info/entry_points.txt +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs.egg-info/requires.txt +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/src/devlogs.egg-info/top_level.txt +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_cli.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_collector_auth.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_collector_config.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_collector_plugins.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_collector_schema.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_collector_server.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_config.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_context.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_devlogs_client.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_formatting.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_handler.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_indexing.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_levels.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_mappings.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_mcp_server.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_opensearch_client.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_opensearch_queries.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_retention.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_scrub.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_time_utils.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_url_parsing.py +0 -0
- {devlogs-2.2.9 → devlogs-2.3.0}/tests/test_web.py +0 -0
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
# AUTO-GENERATED at build time — do not edit or commit
|
|
2
|
-
__version__ = "2.
|
|
2
|
+
__version__ = "2.3.0"
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
# Loki output plugin for the devlogs collector
|
|
2
|
+
#
|
|
3
|
+
# Activated by setting DEVLOGS_FORWARD_URL=loki://host:3100 (or lokis:// for TLS).
|
|
4
|
+
# Converts validated DevlogsRecord objects into Loki stream push payloads.
|
|
5
|
+
#
|
|
6
|
+
# Label strategy (low-cardinality only):
|
|
7
|
+
# application, component, level, area, environment
|
|
8
|
+
#
|
|
9
|
+
# Everything else (message, operation_id, fields, etc.) is stored as a JSON
|
|
10
|
+
# log line and accessible via Loki's | json pipeline.
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
import time
|
|
14
|
+
import urllib.error
|
|
15
|
+
import urllib.request
|
|
16
|
+
from datetime import datetime, timezone
|
|
17
|
+
from typing import Any, Dict, List
|
|
18
|
+
from urllib.parse import urlparse
|
|
19
|
+
|
|
20
|
+
from .errors import PluginError
|
|
21
|
+
from .plugins import OutputPlugin, register_plugin
|
|
22
|
+
from .schema import DevlogsRecord
|
|
23
|
+
|
|
24
|
+
# Labels promoted to Loki stream labels (kept low-cardinality)
|
|
25
|
+
_LOKI_LABEL_FIELDS = ("application", "component", "level", "area", "environment")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _record_to_ns(record: DevlogsRecord) -> int:
|
|
29
|
+
"""Convert record timestamp to Unix nanoseconds for Loki."""
|
|
30
|
+
ts_str = record.timestamp or record.collected_ts
|
|
31
|
+
if not ts_str:
|
|
32
|
+
return int(time.time() * 1e9)
|
|
33
|
+
try:
|
|
34
|
+
# fromisoformat() doesn't accept 'Z' suffix in Python < 3.11
|
|
35
|
+
ts_clean = ts_str
|
|
36
|
+
if ts_clean.endswith("Z"):
|
|
37
|
+
ts_clean = ts_clean[:-1] + "+00:00"
|
|
38
|
+
dt = datetime.fromisoformat(ts_clean)
|
|
39
|
+
if dt.tzinfo is None:
|
|
40
|
+
dt = dt.replace(tzinfo=timezone.utc)
|
|
41
|
+
return int(dt.timestamp() * 1e9)
|
|
42
|
+
except Exception:
|
|
43
|
+
return int(time.time() * 1e9)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _build_log_line(record: DevlogsRecord) -> str:
|
|
47
|
+
"""Serialize the log record as a JSON log line for Loki storage."""
|
|
48
|
+
payload: Dict[str, Any] = {}
|
|
49
|
+
if record.message is not None:
|
|
50
|
+
payload["message"] = record.message
|
|
51
|
+
if record.operation_id is not None:
|
|
52
|
+
payload["operation_id"] = record.operation_id
|
|
53
|
+
payload["timestamp"] = record.timestamp
|
|
54
|
+
if record.collected_ts is not None:
|
|
55
|
+
payload["collected_ts"] = record.collected_ts
|
|
56
|
+
if record.version is not None:
|
|
57
|
+
payload["version"] = record.version
|
|
58
|
+
if record.fields:
|
|
59
|
+
payload["fields"] = record.fields
|
|
60
|
+
return json.dumps(payload, ensure_ascii=False)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _stream_key(record: DevlogsRecord) -> tuple:
|
|
64
|
+
"""Return a hashable key representing this record's Loki stream labels."""
|
|
65
|
+
labels = []
|
|
66
|
+
if record.application:
|
|
67
|
+
labels.append(("application", record.application))
|
|
68
|
+
if record.component:
|
|
69
|
+
labels.append(("component", record.component))
|
|
70
|
+
if record.level:
|
|
71
|
+
labels.append(("level", record.level.lower()))
|
|
72
|
+
if record.area:
|
|
73
|
+
labels.append(("area", record.area))
|
|
74
|
+
if record.environment:
|
|
75
|
+
labels.append(("environment", record.environment))
|
|
76
|
+
return tuple(labels)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class LokiOutputPlugin(OutputPlugin):
    """Output plugin that ships records to Grafana Loki via the HTTP push API.

    URL schemes:
        loki://host:port  -> plain HTTP
        lokis://host:port -> HTTPS
    """

    name = "loki"
    schemes = ["loki", "lokis"]

    def __init__(self, url: str, cfg: Any):
        parts = urlparse(url)
        proto = "https" if parts.scheme == "lokis" else "http"
        endpoint = f"{proto}://{parts.hostname or 'localhost'}:{parts.port or 3100}"
        self._push_url = f"{endpoint}/loki/api/v1/push"
        self._ready_url = f"{endpoint}/ready"
        # Keep the user-supplied URL for display_info().
        self._display_url = url

    def send(self, records: List[DevlogsRecord]) -> Dict[str, Any]:
        """Push records to Loki, one stream per distinct label combination."""
        grouped: Dict[tuple, list] = {}
        for rec in records:
            grouped.setdefault(_stream_key(rec), []).append(
                [str(_record_to_ns(rec)), _build_log_line(rec)]
            )

        payload = {
            "streams": [
                {"stream": dict(labels), "values": values}
                for labels, values in grouped.items()
            ]
        }
        request = urllib.request.Request(
            self._push_url,
            data=json.dumps(payload).encode("utf-8"),
            headers={"Content-Type": "application/json"},
            method="POST",
        )

        for attempt in range(3):
            may_retry = attempt < 2
            try:
                with urllib.request.urlopen(request, timeout=10) as resp:
                    # Loki answers 204 No Content on success.
                    if resp.status in (200, 204):
                        return {"ingested": len(records)}
                    raise PluginError(
                        "UNEXPECTED_STATUS",
                        f"Loki push returned unexpected status {resp.status}",
                    )
            except urllib.error.HTTPError as e:
                # Retry transient server-side statuses with exponential backoff.
                if may_retry and e.code in (429, 500, 502, 503, 504):
                    time.sleep(0.5 * (2 ** attempt))
                    continue
                try:
                    detail = e.read().decode("utf-8", errors="replace")[:300]
                except Exception:
                    detail = ""
                raise PluginError(
                    "HTTP_ERROR",
                    f"Loki push failed with HTTP {e.code}: {detail}",
                )
            except urllib.error.URLError as e:
                if may_retry:
                    time.sleep(0.5 * (2 ** attempt))
                    continue
                raise PluginError(
                    "SEND_FAILED",
                    f"Failed to connect to Loki: {e.reason}",
                )
            except PluginError:
                raise
            except Exception as e:
                if may_retry:
                    time.sleep(0.5 * (2 ** attempt))
                    continue
                raise PluginError("SEND_FAILED", f"Loki send failed: {e}")

        raise PluginError("SEND_FAILED", "Failed to push to Loki after retries")

    def check(self) -> str:
        """Probe Loki's /ready endpoint; raise when it is unreachable."""
        try:
            probe = urllib.request.Request(self._ready_url, method="GET")
            with urllib.request.urlopen(probe, timeout=5) as resp:
                if resp.status == 200:
                    return f"Loki: OK ({self._push_url})"
                return f"Loki: /ready returned {resp.status}"
        except Exception as e:
            raise Exception(f"Loki not reachable at {self._ready_url}: {e}")

    def display_info(self) -> str:
        return f"Loki: {self._display_url}"


# Self-register when this module is imported
register_plugin(LokiOutputPlugin)
|
|
@@ -44,6 +44,13 @@ from .ingestor import ingest_records
|
|
|
44
44
|
from .plugins import get_plugin_for_url
|
|
45
45
|
from ..version import __version__
|
|
46
46
|
|
|
47
|
+
# Register built-in output plugins by importing them (side-effect: register_plugin is called)
|
|
48
|
+
try:
|
|
49
|
+
from . import loki_plugin as _loki_plugin # noqa: F401
|
|
50
|
+
except ImportError:
|
|
51
|
+
pass
|
|
52
|
+
|
|
53
|
+
|
|
47
54
|
@asynccontextmanager
|
|
48
55
|
async def lifespan(app: FastAPI):
|
|
49
56
|
"""Emit a startup trace to the index so operators can see when the collector started."""
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Loki query module for devlogs
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
# LogQL query module for devlogs
|
|
2
|
+
#
|
|
3
|
+
# Provides search, tail, and aggregation operations against a Grafana Loki
|
|
4
|
+
# instance via its HTTP API.
|
|
5
|
+
#
|
|
6
|
+
# Label strategy mirrors the collector's Loki plugin:
|
|
7
|
+
# Indexed labels: application, component, level, area, environment
|
|
8
|
+
# Log line payload: message, operation_id, timestamp, fields, version
|
|
9
|
+
|
|
10
|
+
import json
|
|
11
|
+
import re
|
|
12
|
+
import urllib.error
|
|
13
|
+
import urllib.parse
|
|
14
|
+
import urllib.request
|
|
15
|
+
from datetime import datetime, timedelta, timezone
|
|
16
|
+
from typing import Any, Callable, Dict, List, Optional
|
|
17
|
+
|
|
18
|
+
# ---------------------------------------------------------------------------
|
|
19
|
+
# Input validation
|
|
20
|
+
# ---------------------------------------------------------------------------
|
|
21
|
+
|
|
22
|
+
_LABEL_NAME_RE = re.compile(r'^[a-z_][a-z0-9_]*$')
|
|
23
|
+
_DURATION_RE = re.compile(r'^[0-9]+(ms|s|m|h|d|w)$')
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _validate_label_name(name: str) -> str:
|
|
27
|
+
"""Ensure a label name contains only safe characters."""
|
|
28
|
+
if not _LABEL_NAME_RE.match(name):
|
|
29
|
+
raise ValueError(f"Invalid label name for group_by: {name!r}")
|
|
30
|
+
return name
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _validate_duration(interval: str) -> str:
|
|
34
|
+
"""Ensure a duration string matches the Loki duration format."""
|
|
35
|
+
if not _DURATION_RE.match(interval):
|
|
36
|
+
raise ValueError(
|
|
37
|
+
f"Invalid interval {interval!r}. Must match [0-9]+(ms|s|m|h|d|w), e.g. '5m', '1h'."
|
|
38
|
+
)
|
|
39
|
+
return interval
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# ---------------------------------------------------------------------------
|
|
43
|
+
# LogQL builder helpers
|
|
44
|
+
# ---------------------------------------------------------------------------
|
|
45
|
+
|
|
46
|
+
def build_stream_selector(labels: Dict[str, str]) -> str:
    """Render a dict of labels as a LogQL stream selector.

    Keys are emitted in sorted order, e.g. {application="my-app", level="error"};
    an empty dict yields {} (matches all streams). Backslashes and double
    quotes in values are escaped.
    """
    clauses = []
    for key in sorted(labels):
        value = labels[key].replace("\\", "\\\\").replace('"', '\\"')
        clauses.append(f'{key}="{value}"')
    return "{" + ", ".join(clauses) + "}"
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def build_log_pipeline(filter_text: Optional[str], use_json: bool = True) -> str:
    """Build a LogQL log pipeline expression.

    Args:
        filter_text: Optional substring; when given, a line filter stage
            (|= "...") is appended, with backslashes and quotes escaped.
        use_json: When True (default), prepend the | json parser stage.

    Returns:
        A pipeline string such as '| json |= "timeout"'; empty string when
        neither stage applies.
    """
    # NOTE: the original docstring showed a label-matcher stage
    # (| message =~ "..."); the code actually emits a |= line filter.
    parts = []
    if use_json:
        parts.append("| json")
    if filter_text:
        escaped = filter_text.replace("\\", "\\\\").replace('"', '\\"')
        parts.append(f'|= "{escaped}"')
    return " ".join(parts)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _escape_label_value(value: str) -> str:
|
|
76
|
+
return value.replace("\\", "\\\\").replace('"', '\\"')
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
# ---------------------------------------------------------------------------
|
|
80
|
+
# Timestamp helpers
|
|
81
|
+
# ---------------------------------------------------------------------------
|
|
82
|
+
|
|
83
|
+
def _to_ns(dt: datetime) -> int:
|
|
84
|
+
"""Convert datetime to Unix nanoseconds (Loki's time format)."""
|
|
85
|
+
return int(dt.timestamp() * 1e9)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _parse_time_param(value: Optional[str]) -> Optional[datetime]:
    """Parse an ISO timestamp or relative duration string to an aware datetime.

    Accepts:
        - ISO 8601: "2024-01-15T10:30:00Z" (or with an explicit UTC offset)
        - Relative: "1h", "30m", "7d"
        - None / unparseable input -> returns None

    Naive timestamps are assumed to be UTC; an explicit offset is preserved.
    """
    if not value:
        return None
    from ..time_utils import resolve_relative_time
    resolved = resolve_relative_time(value)
    if resolved is None:
        return None
    try:
        # fromisoformat() on Python < 3.11 rejects a 'Z' suffix; map it to +00:00
        # instead of stripping it, so the offset information is kept.
        ts = resolved[:-1] + "+00:00" if resolved.endswith("Z") else resolved
        dt = datetime.fromisoformat(ts)
        if dt.tzinfo is None:
            # Only assume UTC when no offset was given; previously an explicit
            # offset (e.g. "+05:00") was clobbered, shifting the instant.
            dt = dt.replace(tzinfo=timezone.utc)
        return dt
    except ValueError:
        return None
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
# ---------------------------------------------------------------------------
|
|
110
|
+
# HTTP helpers
|
|
111
|
+
# ---------------------------------------------------------------------------
|
|
112
|
+
|
|
113
|
+
def _loki_get(loki_url: str, path: str, params: Dict[str, str]) -> Dict[str, Any]:
    """GET a Loki HTTP API endpoint and decode the JSON response.

    Raises:
        RuntimeError: wrapping HTTP errors, connection failures, or any
            other request/decoding problem.
    """
    query_string = urllib.parse.urlencode(params)
    endpoint = f"{loki_url.rstrip('/')}{path}?{query_string}"
    request = urllib.request.Request(endpoint, method="GET")
    try:
        with urllib.request.urlopen(request, timeout=30) as response:
            return json.loads(response.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        detail = ""
        try:
            detail = e.read().decode("utf-8", errors="replace")[:300]
        except Exception:
            pass
        raise RuntimeError(f"Loki HTTP {e.code}: {detail}")
    except urllib.error.URLError as e:
        raise RuntimeError(f"Loki connection failed: {e.reason}")
    except Exception as e:
        raise RuntimeError(f"Loki request failed: {e}")
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
# ---------------------------------------------------------------------------
|
|
136
|
+
# Response parsing
|
|
137
|
+
# ---------------------------------------------------------------------------
|
|
138
|
+
|
|
139
|
+
def _parse_log_streams(data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
|
140
|
+
"""Parse a Loki query_range (streams) response into a list of log dicts."""
|
|
141
|
+
results = []
|
|
142
|
+
for stream in data.get("data", {}).get("result", []):
|
|
143
|
+
stream_labels = stream.get("stream", {})
|
|
144
|
+
for ts_ns, line in stream.get("values", []):
|
|
145
|
+
try:
|
|
146
|
+
log_data: Dict[str, Any] = json.loads(line)
|
|
147
|
+
except json.JSONDecodeError:
|
|
148
|
+
log_data = {"message": line}
|
|
149
|
+
# Merge stream labels into the record (labels take precedence)
|
|
150
|
+
log_data.update(stream_labels)
|
|
151
|
+
# Add the Loki timestamp (nanoseconds) as a convenience field
|
|
152
|
+
log_data["_loki_ts_ns"] = ts_ns
|
|
153
|
+
results.append(log_data)
|
|
154
|
+
return results
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def _parse_metric_matrix(data: Dict[str, Any]) -> List[Dict[str, Any]]:
|
|
158
|
+
"""Parse a Loki metric query_range (matrix) response."""
|
|
159
|
+
results = []
|
|
160
|
+
for series in data.get("data", {}).get("result", []):
|
|
161
|
+
labels = series.get("metric", {})
|
|
162
|
+
for ts, value in series.get("values", []):
|
|
163
|
+
entry: Dict[str, Any] = {
|
|
164
|
+
"timestamp": datetime.fromtimestamp(float(ts), tz=timezone.utc)
|
|
165
|
+
.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
|
|
166
|
+
"value": float(value),
|
|
167
|
+
}
|
|
168
|
+
entry.update(labels)
|
|
169
|
+
results.append(entry)
|
|
170
|
+
return results
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
# ---------------------------------------------------------------------------
|
|
174
|
+
# Public query API
|
|
175
|
+
# ---------------------------------------------------------------------------
|
|
176
|
+
|
|
177
|
+
def search(
    loki_url: str,
    app: str,
    level: Optional[str] = None,
    component: Optional[str] = None,
    area: Optional[str] = None,
    environment: Optional[str] = None,
    start: Optional[str] = None,
    end: Optional[str] = None,
    limit: int = 100,
    filter_text: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Search log entries in a time range.

    Maps to GET /loki/api/v1/query_range with direction=backward.

    Args:
        loki_url: Base URL of the Loki instance (e.g. "http://loki:3100").
        app: Application label to filter by.
        level: Log level label filter (e.g. "error").
        component: Component label filter.
        area: Area label filter.
        environment: Environment label filter.
        start: ISO timestamp or relative duration (e.g. "1h") for range start.
        end: ISO timestamp or relative duration for range end.
        limit: Maximum number of entries to return.
        filter_text: Optional text matched against the log line.

    Returns:
        List of log record dicts (stream labels merged with JSON payload),
        most recent first.
    """
    now = datetime.now(timezone.utc)
    start_dt = _parse_time_param(start) or (now - timedelta(hours=1))
    end_dt = _parse_time_param(end) or now

    labels: Dict[str, str] = {"application": app}
    if level:
        labels["level"] = level.lower()
    if component:
        labels["component"] = component
    if area:
        labels["area"] = area
    if environment:
        labels["environment"] = environment

    query = f"{build_stream_selector(labels)} {build_log_pipeline(filter_text)}".strip()

    data = _loki_get(loki_url, "/loki/api/v1/query_range", {
        "query": query,
        "start": str(_to_ns(start_dt)),
        "end": str(_to_ns(end_dt)),
        "limit": str(limit),
        "direction": "backward",
    })

    entries = _parse_log_streams(data)
    # Sort numerically: _loki_ts_ns is a string, and lexicographic comparison
    # misorders nanosecond values of different digit lengths.
    entries.sort(key=lambda e: int(e.get("_loki_ts_ns", 0)), reverse=True)
    return entries
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def tail(
    loki_url: str,
    app: str,
    level: Optional[str] = None,
    component: Optional[str] = None,
    since: Optional[str] = None,
    limit: int = 50,
) -> List[Dict[str, Any]]:
    """Return the most recent log entries (non-streaming snapshot).

    Uses query_range with a short lookback window. For real-time streaming
    use Loki's WebSocket endpoint /loki/api/v1/tail directly.

    Args:
        loki_url: Base URL of the Loki instance.
        app: Application label to filter by.
        level: Optional level label filter.
        component: Optional component label filter.
        since: Lookback window as a relative string (default "10m").
        limit: Maximum number of entries to return.
    """
    now = datetime.now(timezone.utc)
    start_dt = _parse_time_param(since or "10m") or (now - timedelta(minutes=10))

    labels: Dict[str, str] = {"application": app}
    if level:
        labels["level"] = level.lower()
    if component:
        labels["component"] = component

    query = f"{build_stream_selector(labels)} | json"

    data = _loki_get(loki_url, "/loki/api/v1/query_range", {
        "query": query,
        "start": str(_to_ns(start_dt)),
        "end": str(_to_ns(now)),
        "limit": str(limit),
        "direction": "backward",
    })

    entries = _parse_log_streams(data)
    # Sort numerically: _loki_ts_ns is a string, and lexicographic comparison
    # misorders nanosecond values of different digit lengths.
    entries.sort(key=lambda e: int(e.get("_loki_ts_ns", 0)), reverse=True)
    return entries
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def count_over_time(
    loki_url: str,
    app: str,
    interval: str = "5m",
    group_by: Optional[List[str]] = None,
    start: Optional[str] = None,
    end: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Aggregate log counts over a time interval via a LogQL metric query.

    Maps to GET /loki/api/v1/query_range. Example LogQL produced:
        sum by (level) (count_over_time({application="my-app"}[5m]))

    Args:
        loki_url: Base URL of the Loki instance.
        app: Application label to filter by.
        interval: Duration window for count_over_time (e.g. "1m", "5m", "1h").
        group_by: Labels to group by (e.g. ["level", "component"]).
        start: Range start as ISO timestamp or relative string.
        end: Range end as ISO timestamp or relative string.
    """
    now = datetime.now(timezone.utc)
    window_start = _parse_time_param(start) or (now - timedelta(hours=1))
    window_end = _parse_time_param(end) or now

    _validate_duration(interval)

    counts = f"count_over_time({build_stream_selector({'application': app})}[{interval}])"
    if group_by:
        grouping = ", ".join(_validate_label_name(lbl) for lbl in group_by)
        query = f"sum by ({grouping}) ({counts})"
    else:
        query = f"sum({counts})"

    data = _loki_get(loki_url, "/loki/api/v1/query_range", {
        "query": query,
        "start": str(_to_ns(window_start)),
        "end": str(_to_ns(window_end)),
        "step": interval,
    })
    return _parse_metric_matrix(data)
|
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
4
|
import json
|
|
5
|
+
import os
|
|
5
6
|
from typing import Any
|
|
6
7
|
|
|
7
8
|
import mcp.server.stdio
|
|
@@ -113,6 +114,101 @@ def _error_response(message: str, error_type: str = "Error") -> list[types.TextC
|
|
|
113
114
|
return _json_response(error={"type": error_type, "message": message})
|
|
114
115
|
|
|
115
116
|
|
|
117
|
+
def _get_loki_url() -> str | None:
|
|
118
|
+
"""Return LOKI_URL from environment, or None if not set."""
|
|
119
|
+
return os.environ.get("LOKI_URL") or None
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _handle_loki_search(arguments: dict) -> list[types.TextContent]:
    """Serve search_logs via the Loki backend (used when LOKI_URL is configured)."""
    from ..loki.queries import search as loki_search

    backend_url = _get_loki_url()
    application = arguments.get("application") or load_config().application
    if not application:
        return _error_response(
            "application is required for Loki backend (set via argument or DEVLOGS_URL)",
            "ValidationError",
        )

    try:
        entries = loki_search(
            loki_url=backend_url,
            app=application,
            level=arguments.get("level"),
            component=arguments.get("component"),
            area=arguments.get("area"),
            start=arguments.get("since"),
            end=arguments.get("until"),
            limit=_coerce_limit(arguments.get("limit"), 50, 100),
            filter_text=arguments.get("query"),
        )
        return _json_response(
            data={"entries": entries},
            meta={"count": len(entries)},
        )
    except Exception as e:
        return _error_response(f"Loki search error: {e}", "SearchError")
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def _handle_loki_tail(arguments: dict) -> list[types.TextContent]:
    """Serve tail_logs via the Loki backend (used when LOKI_URL is configured)."""
    from ..loki.queries import tail as loki_tail

    backend_url = _get_loki_url()
    application = arguments.get("application") or load_config().application
    if not application:
        return _error_response(
            "application is required for Loki backend (set via argument or DEVLOGS_URL)",
            "ValidationError",
        )

    try:
        entries = loki_tail(
            loki_url=backend_url,
            app=application,
            level=arguments.get("level"),
            component=arguments.get("component"),
            since=arguments.get("since", "10m"),
            limit=_coerce_limit(arguments.get("limit"), 20, 100),
        )
        return _json_response(
            data={"entries": entries},
            meta={"count": len(entries)},
        )
    except Exception as e:
        return _error_response(f"Loki tail error: {e}", "TailError")
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def _handle_loki_get_log_stats(arguments: dict) -> list[types.TextContent]:
    """Serve get_log_stats via Loki's count_over_time aggregation."""
    from ..loki.queries import count_over_time as loki_count_over_time

    backend_url = _get_loki_url()
    application = arguments.get("application") or load_config().application
    if not application:
        return _error_response(
            "application is required for Loki backend (set via argument or DEVLOGS_URL)",
            "ValidationError",
        )

    try:
        stats = loki_count_over_time(
            loki_url=backend_url,
            app=application,
            interval=arguments.get("interval", "5m"),
            group_by=arguments.get("group_by"),
            start=arguments.get("since"),
            end=arguments.get("until"),
        )
        return _json_response(
            data={"stats": stats},
            meta={"count": len(stats)},
        )
    except Exception as e:
        return _error_response(f"Loki stats error: {e}", "StatsError")
|
|
210
|
+
|
|
211
|
+
|
|
116
212
|
def _handle_emit_log(arguments: dict) -> list[types.TextContent]:
|
|
117
213
|
"""Handle the emit_log tool call."""
|
|
118
214
|
from ..devlogs_client import DevlogsClient
|
|
@@ -568,6 +664,37 @@ async def main():
|
|
|
568
664
|
"required": ["anchor_timestamp"],
|
|
569
665
|
},
|
|
570
666
|
),
|
|
667
|
+
types.Tool(
|
|
668
|
+
name="get_log_stats",
|
|
669
|
+
description="Get log counts aggregated over a time interval. Requires Loki backend (LOKI_URL).",
|
|
670
|
+
inputSchema={
|
|
671
|
+
"type": "object",
|
|
672
|
+
"properties": {
|
|
673
|
+
"application": {
|
|
674
|
+
"type": "string",
|
|
675
|
+
"description": "Application name to aggregate stats for",
|
|
676
|
+
},
|
|
677
|
+
"interval": {
|
|
678
|
+
"type": "string",
|
|
679
|
+
"description": "Aggregation interval (e.g. '1m', '5m', '1h')",
|
|
680
|
+
"default": "5m",
|
|
681
|
+
},
|
|
682
|
+
"group_by": {
|
|
683
|
+
"type": "array",
|
|
684
|
+
"items": {"type": "string"},
|
|
685
|
+
"description": "Labels to group by (e.g. ['level', 'component'])",
|
|
686
|
+
},
|
|
687
|
+
"since": {
|
|
688
|
+
"type": "string",
|
|
689
|
+
"description": "Range start as ISO timestamp or relative duration like '1h'",
|
|
690
|
+
},
|
|
691
|
+
"until": {
|
|
692
|
+
"type": "string",
|
|
693
|
+
"description": "Range end as ISO timestamp or relative duration",
|
|
694
|
+
},
|
|
695
|
+
},
|
|
696
|
+
},
|
|
697
|
+
),
|
|
571
698
|
types.Tool(
|
|
572
699
|
name="emit_log",
|
|
573
700
|
description="Emit a log entry to the configured devlogs backend (collector, OpenSearch, or plugin).",
|
|
@@ -599,6 +726,21 @@ async def main():
|
|
|
599
726
|
if name == "emit_log":
|
|
600
727
|
return _handle_emit_log(arguments)
|
|
601
728
|
|
|
729
|
+
# get_log_stats is a Loki-only tool
|
|
730
|
+
if name == "get_log_stats":
|
|
731
|
+
if not _get_loki_url():
|
|
732
|
+
return _error_response(
|
|
733
|
+
"get_log_stats requires LOKI_URL to be set", "ConfigurationError"
|
|
734
|
+
)
|
|
735
|
+
return _handle_loki_get_log_stats(arguments)
|
|
736
|
+
|
|
737
|
+
# Route search_logs and tail_logs to Loki when LOKI_URL is configured
|
|
738
|
+
if _get_loki_url():
|
|
739
|
+
if name == "search_logs":
|
|
740
|
+
return _handle_loki_search(arguments)
|
|
741
|
+
if name == "tail_logs":
|
|
742
|
+
return _handle_loki_tail(arguments)
|
|
743
|
+
|
|
602
744
|
try:
|
|
603
745
|
client, index, config_application = _create_client_and_index()
|
|
604
746
|
except RuntimeError as e:
|
|
@@ -31,12 +31,15 @@ src/devlogs/collector/cli.py
|
|
|
31
31
|
src/devlogs/collector/errors.py
|
|
32
32
|
src/devlogs/collector/forwarder.py
|
|
33
33
|
src/devlogs/collector/ingestor.py
|
|
34
|
+
src/devlogs/collector/loki_plugin.py
|
|
34
35
|
src/devlogs/collector/plugins.py
|
|
35
36
|
src/devlogs/collector/schema.py
|
|
36
37
|
src/devlogs/collector/server.py
|
|
37
38
|
src/devlogs/jenkins/__init__.py
|
|
38
39
|
src/devlogs/jenkins/cli.py
|
|
39
40
|
src/devlogs/jenkins/core.py
|
|
41
|
+
src/devlogs/loki/__init__.py
|
|
42
|
+
src/devlogs/loki/queries.py
|
|
40
43
|
src/devlogs/mcp/__init__.py
|
|
41
44
|
src/devlogs/mcp/server.py
|
|
42
45
|
src/devlogs/opensearch/__init__.py
|
|
@@ -91,7 +91,8 @@ class TestEnvBuildIdPrecedence:
|
|
|
91
91
|
class TestEnvBranchAndTimestamp:
|
|
92
92
|
"""Test env provides branch and timestamp (build_id computed)."""
|
|
93
93
|
|
|
94
|
-
def test_env_branch_generates_build_id(self, monkeypatch):
|
|
94
|
+
def test_env_branch_generates_build_id(self, monkeypatch, tmp_path):
|
|
95
|
+
monkeypatch.chdir(tmp_path)
|
|
95
96
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
96
97
|
monkeypatch.setenv("DEVLOGS_BRANCH", "feature/my-feature")
|
|
97
98
|
|
|
@@ -102,7 +103,8 @@ class TestEnvBranchAndTimestamp:
|
|
|
102
103
|
assert result.timestamp_utc == FIXED_TIMESTAMP
|
|
103
104
|
assert result.source == "env"
|
|
104
105
|
|
|
105
|
-
def test_env_timestamp_used(self, monkeypatch):
|
|
106
|
+
def test_env_timestamp_used(self, monkeypatch, tmp_path):
|
|
107
|
+
monkeypatch.chdir(tmp_path)
|
|
106
108
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
107
109
|
monkeypatch.setenv("DEVLOGS_BRANCH", "main")
|
|
108
110
|
monkeypatch.setenv("DEVLOGS_BUILD_TIMESTAMP_UTC", "20250101T120000Z")
|
|
@@ -333,7 +335,8 @@ class TestWriteIfMissing:
|
|
|
333
335
|
class TestAllowGitFalse:
|
|
334
336
|
"""Test that allow_git=False never calls git."""
|
|
335
337
|
|
|
336
|
-
def test_no_subprocess_called_when_allow_git_false(self, monkeypatch):
|
|
338
|
+
def test_no_subprocess_called_when_allow_git_false(self, monkeypatch, tmp_path):
|
|
339
|
+
monkeypatch.chdir(tmp_path)
|
|
337
340
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
338
341
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
339
342
|
|
|
@@ -357,7 +360,8 @@ class TestAllowGitFalse:
|
|
|
357
360
|
class TestAllowGitTrue:
|
|
358
361
|
"""Test allow_git=True behavior with various git scenarios."""
|
|
359
362
|
|
|
360
|
-
def test_git_success_gets_branch(self, monkeypatch):
|
|
363
|
+
def test_git_success_gets_branch(self, monkeypatch, tmp_path):
|
|
364
|
+
monkeypatch.chdir(tmp_path)
|
|
361
365
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
362
366
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
363
367
|
|
|
@@ -371,7 +375,8 @@ class TestAllowGitTrue:
|
|
|
371
375
|
assert result.branch == "feature/test-branch"
|
|
372
376
|
assert result.build_id == f"feature/test-branch-{FIXED_TIMESTAMP}"
|
|
373
377
|
|
|
374
|
-
def test_git_command_not_found(self, monkeypatch):
|
|
378
|
+
def test_git_command_not_found(self, monkeypatch, tmp_path):
|
|
379
|
+
monkeypatch.chdir(tmp_path)
|
|
375
380
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
376
381
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
377
382
|
|
|
@@ -381,7 +386,8 @@ class TestAllowGitTrue:
|
|
|
381
386
|
assert result.branch is None
|
|
382
387
|
assert result.build_id == f"unknown-{FIXED_TIMESTAMP}"
|
|
383
388
|
|
|
384
|
-
def test_git_nonzero_exit(self, monkeypatch):
|
|
389
|
+
def test_git_nonzero_exit(self, monkeypatch, tmp_path):
|
|
390
|
+
monkeypatch.chdir(tmp_path)
|
|
385
391
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
386
392
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
387
393
|
|
|
@@ -395,7 +401,8 @@ class TestAllowGitTrue:
|
|
|
395
401
|
assert result.branch is None
|
|
396
402
|
assert result.build_id == f"unknown-{FIXED_TIMESTAMP}"
|
|
397
403
|
|
|
398
|
-
def test_git_timeout(self, monkeypatch):
|
|
404
|
+
def test_git_timeout(self, monkeypatch, tmp_path):
|
|
405
|
+
monkeypatch.chdir(tmp_path)
|
|
399
406
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
400
407
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
401
408
|
|
|
@@ -405,7 +412,8 @@ class TestAllowGitTrue:
|
|
|
405
412
|
assert result.branch is None
|
|
406
413
|
assert result.build_id == f"unknown-{FIXED_TIMESTAMP}"
|
|
407
414
|
|
|
408
|
-
def test_git_returns_HEAD_detached(self, monkeypatch):
|
|
415
|
+
def test_git_returns_HEAD_detached(self, monkeypatch, tmp_path):
|
|
416
|
+
monkeypatch.chdir(tmp_path)
|
|
409
417
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
410
418
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
411
419
|
|
|
@@ -434,7 +442,8 @@ class TestDeterministicBuildId:
|
|
|
434
442
|
assert result1.build_id == result2.build_id
|
|
435
443
|
assert result1.timestamp_utc == result2.timestamp_utc
|
|
436
444
|
|
|
437
|
-
def test_different_now_fn_gives_different_result(self, monkeypatch):
|
|
445
|
+
def test_different_now_fn_gives_different_result(self, monkeypatch, tmp_path):
|
|
446
|
+
monkeypatch.chdir(tmp_path)
|
|
438
447
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
439
448
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
440
449
|
|
|
@@ -533,7 +542,8 @@ class TestSearchUpBehavior:
|
|
|
533
542
|
class TestResolveBuildId:
|
|
534
543
|
"""Test the resolve_build_id convenience function."""
|
|
535
544
|
|
|
536
|
-
def test_returns_string_only(self, monkeypatch):
|
|
545
|
+
def test_returns_string_only(self, monkeypatch, tmp_path):
|
|
546
|
+
monkeypatch.chdir(tmp_path)
|
|
537
547
|
monkeypatch.delenv("DEVLOGS_BUILD_ID", raising=False)
|
|
538
548
|
monkeypatch.delenv("DEVLOGS_BRANCH", raising=False)
|
|
539
549
|
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|