holmesgpt 0.16.2a0__py3-none-any.whl → 0.18.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- holmes/__init__.py +3 -5
- holmes/clients/robusta_client.py +4 -3
- holmes/common/env_vars.py +18 -2
- holmes/common/openshift.py +1 -1
- holmes/config.py +11 -6
- holmes/core/conversations.py +30 -13
- holmes/core/investigation.py +21 -25
- holmes/core/investigation_structured_output.py +3 -3
- holmes/core/issue.py +1 -1
- holmes/core/llm.py +50 -31
- holmes/core/models.py +19 -17
- holmes/core/openai_formatting.py +1 -1
- holmes/core/prompt.py +47 -2
- holmes/core/runbooks.py +1 -0
- holmes/core/safeguards.py +4 -2
- holmes/core/supabase_dal.py +4 -2
- holmes/core/tool_calling_llm.py +102 -141
- holmes/core/tools.py +19 -28
- holmes/core/tools_utils/token_counting.py +9 -2
- holmes/core/tools_utils/tool_context_window_limiter.py +13 -30
- holmes/core/tools_utils/tool_executor.py +0 -18
- holmes/core/tools_utils/toolset_utils.py +1 -0
- holmes/core/toolset_manager.py +37 -2
- holmes/core/tracing.py +13 -2
- holmes/core/transformers/__init__.py +1 -1
- holmes/core/transformers/base.py +1 -0
- holmes/core/transformers/llm_summarize.py +3 -2
- holmes/core/transformers/registry.py +2 -1
- holmes/core/transformers/transformer.py +1 -0
- holmes/core/truncation/compaction.py +37 -2
- holmes/core/truncation/input_context_window_limiter.py +3 -2
- holmes/interactive.py +52 -8
- holmes/main.py +17 -37
- holmes/plugins/interfaces.py +2 -1
- holmes/plugins/prompts/__init__.py +2 -1
- holmes/plugins/prompts/_fetch_logs.jinja2 +5 -5
- holmes/plugins/prompts/_runbook_instructions.jinja2 +2 -1
- holmes/plugins/prompts/base_user_prompt.jinja2 +7 -0
- holmes/plugins/prompts/conversation_history_compaction.jinja2 +2 -1
- holmes/plugins/prompts/generic_ask.jinja2 +0 -2
- holmes/plugins/prompts/generic_ask_conversation.jinja2 +0 -2
- holmes/plugins/prompts/generic_ask_for_issue_conversation.jinja2 +0 -2
- holmes/plugins/prompts/generic_investigation.jinja2 +0 -2
- holmes/plugins/prompts/investigation_procedure.jinja2 +2 -1
- holmes/plugins/prompts/kubernetes_workload_ask.jinja2 +0 -2
- holmes/plugins/prompts/kubernetes_workload_chat.jinja2 +0 -2
- holmes/plugins/runbooks/__init__.py +32 -3
- holmes/plugins/sources/github/__init__.py +4 -2
- holmes/plugins/sources/prometheus/models.py +1 -0
- holmes/plugins/toolsets/__init__.py +30 -26
- holmes/plugins/toolsets/atlas_mongodb/mongodb_atlas.py +13 -12
- holmes/plugins/toolsets/azure_sql/apis/alert_monitoring_api.py +3 -2
- holmes/plugins/toolsets/azure_sql/apis/azure_sql_api.py +2 -1
- holmes/plugins/toolsets/azure_sql/apis/connection_failure_api.py +3 -2
- holmes/plugins/toolsets/azure_sql/apis/connection_monitoring_api.py +3 -1
- holmes/plugins/toolsets/azure_sql/apis/storage_analysis_api.py +3 -1
- holmes/plugins/toolsets/azure_sql/azure_sql_toolset.py +12 -12
- holmes/plugins/toolsets/azure_sql/tools/analyze_connection_failures.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_connections.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_health_status.py +3 -5
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_performance.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/analyze_database_storage.py +7 -7
- holmes/plugins/toolsets/azure_sql/tools/get_active_alerts.py +6 -8
- holmes/plugins/toolsets/azure_sql/tools/get_slow_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_cpu_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_data_io_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/tools/get_top_log_io_queries.py +3 -3
- holmes/plugins/toolsets/azure_sql/utils.py +0 -32
- holmes/plugins/toolsets/bash/argocd/__init__.py +3 -3
- holmes/plugins/toolsets/bash/aws/__init__.py +4 -4
- holmes/plugins/toolsets/bash/azure/__init__.py +4 -4
- holmes/plugins/toolsets/bash/bash_toolset.py +2 -3
- holmes/plugins/toolsets/bash/common/bash.py +19 -9
- holmes/plugins/toolsets/bash/common/bash_command.py +1 -1
- holmes/plugins/toolsets/bash/common/stringify.py +1 -1
- holmes/plugins/toolsets/bash/kubectl/__init__.py +2 -1
- holmes/plugins/toolsets/bash/kubectl/constants.py +0 -1
- holmes/plugins/toolsets/bash/kubectl/kubectl_get.py +3 -4
- holmes/plugins/toolsets/bash/parse_command.py +12 -13
- holmes/plugins/toolsets/connectivity_check.py +124 -0
- holmes/plugins/toolsets/coralogix/api.py +132 -119
- holmes/plugins/toolsets/coralogix/coralogix.jinja2 +14 -0
- holmes/plugins/toolsets/coralogix/toolset_coralogix.py +219 -0
- holmes/plugins/toolsets/coralogix/utils.py +15 -79
- holmes/plugins/toolsets/datadog/datadog_api.py +36 -3
- holmes/plugins/toolsets/datadog/datadog_logs_instructions.jinja2 +34 -1
- holmes/plugins/toolsets/datadog/datadog_metrics_instructions.jinja2 +3 -3
- holmes/plugins/toolsets/datadog/datadog_models.py +59 -0
- holmes/plugins/toolsets/datadog/datadog_url_utils.py +213 -0
- holmes/plugins/toolsets/datadog/instructions_datadog_traces.jinja2 +165 -28
- holmes/plugins/toolsets/datadog/toolset_datadog_general.py +71 -28
- holmes/plugins/toolsets/datadog/toolset_datadog_logs.py +224 -375
- holmes/plugins/toolsets/datadog/toolset_datadog_metrics.py +67 -36
- holmes/plugins/toolsets/datadog/toolset_datadog_traces.py +360 -343
- holmes/plugins/toolsets/elasticsearch/__init__.py +6 -0
- holmes/plugins/toolsets/elasticsearch/elasticsearch.py +834 -0
- holmes/plugins/toolsets/git.py +7 -8
- holmes/plugins/toolsets/grafana/base_grafana_toolset.py +16 -4
- holmes/plugins/toolsets/grafana/common.py +2 -30
- holmes/plugins/toolsets/grafana/grafana_tempo_api.py +2 -1
- holmes/plugins/toolsets/grafana/loki/instructions.jinja2 +18 -2
- holmes/plugins/toolsets/grafana/loki/toolset_grafana_loki.py +92 -18
- holmes/plugins/toolsets/grafana/loki_api.py +4 -0
- holmes/plugins/toolsets/grafana/toolset_grafana.py +109 -25
- holmes/plugins/toolsets/grafana/toolset_grafana_dashboard.jinja2 +22 -0
- holmes/plugins/toolsets/grafana/toolset_grafana_tempo.py +201 -33
- holmes/plugins/toolsets/grafana/trace_parser.py +3 -2
- holmes/plugins/toolsets/internet/internet.py +10 -10
- holmes/plugins/toolsets/internet/notion.py +5 -6
- holmes/plugins/toolsets/investigator/core_investigation.py +3 -3
- holmes/plugins/toolsets/investigator/model.py +3 -1
- holmes/plugins/toolsets/json_filter_mixin.py +134 -0
- holmes/plugins/toolsets/kafka.py +12 -7
- holmes/plugins/toolsets/kubernetes.yaml +260 -30
- holmes/plugins/toolsets/kubernetes_logs.py +3 -3
- holmes/plugins/toolsets/logging_utils/logging_api.py +16 -6
- holmes/plugins/toolsets/mcp/toolset_mcp.py +88 -60
- holmes/plugins/toolsets/newrelic/new_relic_api.py +41 -1
- holmes/plugins/toolsets/newrelic/newrelic.jinja2 +24 -0
- holmes/plugins/toolsets/newrelic/newrelic.py +212 -55
- holmes/plugins/toolsets/prometheus/prometheus.py +358 -102
- holmes/plugins/toolsets/prometheus/prometheus_instructions.jinja2 +11 -3
- holmes/plugins/toolsets/rabbitmq/api.py +23 -4
- holmes/plugins/toolsets/rabbitmq/toolset_rabbitmq.py +5 -5
- holmes/plugins/toolsets/robusta/robusta.py +5 -5
- holmes/plugins/toolsets/runbook/runbook_fetcher.py +25 -6
- holmes/plugins/toolsets/servicenow_tables/servicenow_tables.py +1 -1
- holmes/plugins/toolsets/utils.py +1 -1
- holmes/utils/config_utils.py +1 -1
- holmes/utils/connection_utils.py +31 -0
- holmes/utils/console/result.py +10 -0
- holmes/utils/file_utils.py +2 -1
- holmes/utils/global_instructions.py +10 -26
- holmes/utils/holmes_status.py +4 -3
- holmes/utils/log.py +15 -0
- holmes/utils/markdown_utils.py +2 -3
- holmes/utils/memory_limit.py +58 -0
- holmes/utils/sentry_helper.py +23 -0
- holmes/utils/stream.py +12 -5
- holmes/utils/tags.py +4 -3
- holmes/version.py +3 -1
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/METADATA +12 -10
- holmesgpt-0.18.4.dist-info/RECORD +258 -0
- holmes/plugins/toolsets/aws.yaml +0 -80
- holmes/plugins/toolsets/coralogix/toolset_coralogix_logs.py +0 -114
- holmes/plugins/toolsets/datadog/datadog_traces_formatter.py +0 -310
- holmes/plugins/toolsets/datadog/toolset_datadog_rds.py +0 -736
- holmes/plugins/toolsets/grafana/grafana_api.py +0 -64
- holmes/plugins/toolsets/opensearch/__init__.py +0 -0
- holmes/plugins/toolsets/opensearch/opensearch.py +0 -250
- holmes/plugins/toolsets/opensearch/opensearch_logs.py +0 -161
- holmes/plugins/toolsets/opensearch/opensearch_traces.py +0 -215
- holmes/plugins/toolsets/opensearch/opensearch_traces_instructions.jinja2 +0 -12
- holmes/plugins/toolsets/opensearch/opensearch_utils.py +0 -166
- holmes/utils/keygen_utils.py +0 -6
- holmesgpt-0.16.2a0.dist-info/RECORD +0 -258
- holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_ppl_query_docs.jinja2 +0 -0
- holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_query_assist.py +2 -2
- /holmes/plugins/toolsets/{opensearch → elasticsearch}/opensearch_query_assist_instructions.jinja2 +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/LICENSE +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/WHEEL +0 -0
- {holmesgpt-0.16.2a0.dist-info → holmesgpt-0.18.4.dist-info}/entry_points.txt +0 -0
|
@@ -1,310 +0,0 @@
|
|
|
1
|
-
"""Formatting utilities for Datadog traces output."""
|
|
2
|
-
|
|
3
|
-
from collections import defaultdict
|
|
4
|
-
from datetime import datetime
|
|
5
|
-
from typing import Any, Dict, List, Tuple
|
|
6
|
-
|
|
7
|
-
from holmes.plugins.toolsets.utils import unix_nano_to_rfc3339
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def parse_datadog_span_timestamp(attrs: Dict[str, Any]) -> Tuple[int, int]:
    """Extract the start time and duration of a Datadog span.

    Args:
        attrs: Raw span attribute mapping as returned by the Datadog API.

    Returns:
        Tuple of (start_ns, duration_ns), both in nanoseconds; either value
        falls back to 0 when it cannot be determined.
    """
    custom = attrs.get("custom", {})

    # Duration may live under the "custom" sub-dict or directly on the span.
    duration_ns = custom.get("duration", 0) or attrs.get("duration", 0)

    # Prefer an explicit nanosecond start value when present.
    start_ns = attrs.get("start", 0)

    # Otherwise fall back to parsing the ISO-8601 "start_timestamp" string.
    if not start_ns:
        start_timestamp = attrs.get("start_timestamp", "")
        if start_timestamp:
            try:
                dt = datetime.fromisoformat(start_timestamp.replace("Z", "+00:00"))
                start_ns = int(dt.timestamp() * 1_000_000_000)
            except (ValueError, TypeError, AttributeError):
                # AttributeError: "start_timestamp" was not a string (e.g. a
                # numeric epoch value); treat it as unparseable rather than
                # letting the .replace() call crash the formatter.
                start_ns = 0

    return start_ns, duration_ns
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
def format_traces_list(spans: List[Dict[str, Any]], limit: int = 50) -> str:
    """Render spans grouped by trace ID as a human-readable summary.

    Args:
        spans: Raw Datadog span dicts.
        limit: Maximum number of traces to include in the output.

    Returns:
        A multi-line summary string, or "" when no spans were given.
    """
    if not spans:
        return ""

    # Bucket spans under their trace_id; spans without one are skipped.
    grouped: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
    for candidate in spans:
        tid = candidate.get("attributes", {}).get("trace_id")
        if tid:
            grouped[tid].append(candidate)

    lines = [f"Found {len(grouped)} traces with matching spans", ""]

    for tid, members in list(grouped.items())[:limit]:
        earliest = float("inf")
        latest = 0
        root = None

        for member in members:
            member_attrs = member.get("attributes", {})
            begin_ns, span_duration_ns = parse_datadog_span_timestamp(member_attrs)
            finish_ns = begin_ns + span_duration_ns

            if 0 < begin_ns < earliest:
                earliest = begin_ns
            if finish_ns > latest:
                latest = finish_ns
            # A span without a parent_id is the trace root.
            if not member_attrs.get("parent_id"):
                root = member

        # Fall back to the first span when no explicit root exists.
        if not root and members:
            root = members[0]

        # Guard against traces where no usable timestamps were found.
        if earliest == float("inf") or latest == 0:
            duration_ms = 0.0
        else:
            duration_ms = (latest - earliest) / 1_000_000

        if root:
            root_attrs = root.get("attributes", {})
            svc = root_attrs.get("service", "unknown")
            op = root_attrs.get("operation_name", "unknown")
            started = (
                unix_nano_to_rfc3339(earliest)
                if earliest != float("inf")
                else "unknown"
            )

            lines.append(f"Trace (traceID={tid}) (durationMs={duration_ms:.2f})")
            lines.append(
                f"\tstartTime={started} rootServiceName={svc} rootTraceName={op}"
            )

    return "\n".join(lines)
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
def build_span_hierarchy(
    spans: List[Dict[str, Any]],
) -> Tuple[Dict[str, Dict], List[Dict]]:
    """Turn a flat list of raw spans into a parent/child tree.

    Args:
        spans: Raw Datadog span dicts.

    Returns:
        Tuple of (span_map, root_spans): a mapping of span_id to node dict,
        and the list of root nodes (no parent, or parent not in this trace).
    """
    index: Dict[str, Dict] = {}
    roots: List[Dict] = []

    # Pass 1: normalize every raw span into a node dict keyed by span_id.
    for raw in spans:
        attrs = raw.get("attributes", {})
        sid = attrs.get("span_id", "")
        pid = attrs.get("parent_id", "")
        begin_ns, length_ns = parse_datadog_span_timestamp(attrs)

        node = {
            "span_id": sid,
            "parent_id": pid,
            "name": attrs.get("operation_name", "unknown"),
            "service": attrs.get("service", "unknown"),
            "resource": attrs.get("resource_name", ""),
            "start_ns": begin_ns,
            "duration_ns": length_ns,
            "status": attrs.get("status", ""),
            "tags": attrs.get("tags", []),
            "children": [],
            "attributes": attrs,
        }
        index[sid] = node
        if not pid:
            roots.append(node)

    # Pass 2: attach each node to its parent; a node whose parent is not
    # part of this trace is orphaned and treated as an additional root.
    for node in index.values():
        pid = node["parent_id"]
        if not pid:
            continue
        parent = index.get(pid)
        if parent is not None:
            parent["children"].append(node)
        else:
            roots.append(node)

    return index, roots
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
def format_trace_hierarchy(trace_id: str, spans: List[Dict[str, Any]]) -> str:
    """Render a single trace as an indented span tree.

    Args:
        trace_id: Identifier of the trace being rendered.
        spans: Raw Datadog span dicts belonging to that trace.

    Returns:
        A multi-line tree rendering, or "" when no spans were given.
    """
    if not spans:
        return ""

    _, roots = build_span_hierarchy(spans)

    rendered: List[str] = [f"Trace ID: {trace_id}", ""]

    def walk(node: Dict[str, Any], depth: int = 0) -> None:
        # Emit this node and recurse into its children, one indent per level.
        pad = "  " * depth
        ms = node["duration_ns"] / 1_000_000

        rendered.append(
            f"{pad}├─ {node['name']} ({node['service']}) - {ms:.2f}ms (span_id={node['span_id']})"
        )

        begin_str = unix_nano_to_rfc3339(node["start_ns"])
        finish_str = unix_nano_to_rfc3339(node["start_ns"] + node["duration_ns"])
        rendered.append(
            f"{pad}│  Datetime: start={begin_str} end={finish_str}"
        )

        if node["resource"]:
            rendered.append(f"{pad}│  Resource: {node['resource']}")

        if node["status"]:
            rendered.append(f"{pad}│  Status: {node['status']}")

        # Surface only a small allowlist of high-signal tags.
        wanted = [
            "env",
            "version",
            "http.method",
            "http.status_code",
            "error.type",
            "error.message",
        ]
        shown = {}
        for raw_tag in node["tags"]:
            if isinstance(raw_tag, str) and ":" in raw_tag:
                tag_key, tag_val = raw_tag.split(":", 1)
                if tag_key in wanted:
                    shown[tag_key] = tag_val

        if shown:
            rendered.append(f"{pad}│  Tags:")
            for tag_key, tag_val in shown.items():
                rendered.append(f"{pad}│    {tag_key}: {tag_val}")

        # Children render in chronological order.
        for child in sorted(node["children"], key=lambda s: s["start_ns"]):
            walk(child, depth + 1)

    for root in sorted(roots, key=lambda s: s["start_ns"]):
        walk(root)

    return "\n".join(rendered)
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
def format_spans_search(
    spans: List[Dict[str, Any]], max_traces: int = 50, max_spans_per_trace: int = 10
) -> str:
    """Summarize span search results, grouped per trace.

    Args:
        spans: Raw Datadog span dicts returned by a span search.
        max_traces: Cap on how many traces are rendered.
        max_spans_per_trace: Cap on how many spans are rendered per trace.

    Returns:
        A multi-line summary string, or "" when no spans were given.
    """
    if not spans:
        return ""

    lines = [f"Found {len(spans)} matching spans", ""]

    # Bucket spans per trace so related spans render together.
    by_trace: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
    for entry in spans:
        by_trace[entry.get("attributes", {}).get("trace_id", "unknown")].append(entry)

    lines.append(f"Spans grouped by {len(by_trace)} traces:")
    lines.append("")

    for tid, members in list(by_trace.items())[:max_traces]:
        lines.append(f"Trace ID: {tid}")

        # Chronological order within the trace.
        chronological = sorted(
            members,
            key=lambda s: parse_datadog_span_timestamp(s.get("attributes", {}))[0],
        )

        for entry in chronological[:max_spans_per_trace]:
            attrs = entry.get("attributes", {})

            sid = attrs.get("span_id", "unknown")
            svc = attrs.get("service", "unknown")
            op = attrs.get("operation_name", "unknown")
            res = attrs.get("resource_name", "")

            begin_ns, length_ns = parse_datadog_span_timestamp(attrs)
            ms = length_ns / 1_000_000
            when = unix_nano_to_rfc3339(begin_ns)

            lines.append(f"  ├─ {op} ({svc}) - {ms:.2f}ms")
            lines.append(f"  │  span_id: {sid}")
            lines.append(f"  │  time: {when}")

            if res:
                lines.append(f"  │  resource: {res}")

            # Only surface non-OK statuses.
            state = attrs.get("status", "")
            if state and state != "ok":
                lines.append(f"  │  status: {state}")

            # Pull out a small allowlist of high-signal tags.
            picked = {}
            for raw_tag in attrs.get("tags", []):
                if isinstance(raw_tag, str) and ":" in raw_tag:
                    tag_key, tag_val = raw_tag.split(":", 1)
                    if tag_key in ["env", "version", "http.status_code", "error.type"]:
                        picked[tag_key] = tag_val

            if picked:
                tags_str = ", ".join([f"{k}={v}" for k, v in picked.items()])
                lines.append(f"  │  tags: {tags_str}")

            lines.append("  │")

        if len(members) > max_spans_per_trace:
            lines.append(
                f"  └─ ... and {len(members) - max_spans_per_trace} more spans in this trace"
            )

        lines.append("")

    if len(by_trace) > max_traces:
        lines.append(f"... and {len(by_trace) - max_traces} more traces")

    return "\n".join(lines)
|