gitmap-core 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitmap_core/README.md +46 -0
- gitmap_core/__init__.py +100 -0
- gitmap_core/communication.py +346 -0
- gitmap_core/compat.py +408 -0
- gitmap_core/connection.py +232 -0
- gitmap_core/context.py +709 -0
- gitmap_core/diff.py +283 -0
- gitmap_core/maps.py +385 -0
- gitmap_core/merge.py +449 -0
- gitmap_core/models.py +332 -0
- gitmap_core/py.typed +0 -0
- gitmap_core/pyproject.toml +48 -0
- gitmap_core/remote.py +728 -0
- gitmap_core/repository.py +1632 -0
- gitmap_core/tests/__init__.py +1 -0
- gitmap_core/tests/test_communication.py +695 -0
- gitmap_core/tests/test_compat.py +310 -0
- gitmap_core/tests/test_connection.py +314 -0
- gitmap_core/tests/test_context.py +814 -0
- gitmap_core/tests/test_diff.py +567 -0
- gitmap_core/tests/test_init.py +153 -0
- gitmap_core/tests/test_maps.py +642 -0
- gitmap_core/tests/test_merge.py +694 -0
- gitmap_core/tests/test_models.py +410 -0
- gitmap_core/tests/test_remote.py +3014 -0
- gitmap_core/tests/test_repository.py +1639 -0
- gitmap_core/tests/test_visualize.py +902 -0
- gitmap_core/visualize.py +1217 -0
- gitmap_core-0.1.0.dist-info/METADATA +961 -0
- gitmap_core-0.1.0.dist-info/RECORD +32 -0
- gitmap_core-0.1.0.dist-info/WHEEL +4 -0
- gitmap_core-0.1.0.dist-info/licenses/LICENSE +21 -0
gitmap_core/visualize.py
ADDED
|
@@ -0,0 +1,1217 @@
|
|
|
1
|
+
"""Context graph visualization for GitMap.
|
|
2
|
+
|
|
3
|
+
Provides lightweight visualization of context graph data in formats
|
|
4
|
+
viewable directly in IDEs: Mermaid diagrams, ASCII art, and HTML.
|
|
5
|
+
|
|
6
|
+
Execution Context:
|
|
7
|
+
Library module - imported by CLI and MCP tools
|
|
8
|
+
|
|
9
|
+
Dependencies:
|
|
10
|
+
- None (stdlib only for lightweight deployment)
|
|
11
|
+
|
|
12
|
+
Metadata:
|
|
13
|
+
Version: 0.1.0
|
|
14
|
+
Author: GitMap Team
|
|
15
|
+
"""
|
|
16
|
+
from __future__ import annotations

import html
import re
import textwrap
from dataclasses import dataclass
from datetime import datetime
from typing import Any

from gitmap_core.context import Annotation
from gitmap_core.context import ContextStore
from gitmap_core.context import Edge
from gitmap_core.context import Event
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
# ---- Configuration Constants ---------------------------------------------------------------------------------


# Event type icons for Mermaid flowcharts
# (Font Awesome "fa:fa-*" identifiers; Mermaid renders them as icons when a
# Font Awesome stylesheet is available.)
EVENT_ICONS: dict[str, str] = {
    "commit": "fa:fa-code-commit",
    "push": "fa:fa-cloud-upload",
    "pull": "fa:fa-cloud-download",
    "merge": "fa:fa-code-merge",
    "branch": "fa:fa-code-branch",
    "lsm": "fa:fa-layer-group",
    "diff": "fa:fa-file-diff",
}

# Event type shapes for Mermaid
# Using simpler shapes that render reliably across Mermaid versions
# Each value is the (opening, closing) delimiter pair wrapped around a node label.
EVENT_SHAPES: dict[str, tuple[str, str]] = {
    "commit": ("([", "])"),  # Stadium shape (pill)
    "push": ("[[", "]]"),  # Subroutine shape (double border)
    "pull": ("[[", "]]"),  # Subroutine shape (double border)
    "merge": ("{{", "}}"),  # Hexagon shape
    "branch": ("[", "]"),  # Rectangle (simple, reliable)
    "lsm": ("[", "]"),  # Rectangle (trapezoid syntax unreliable)
    "diff": ("[", "]"),  # Rectangle (parallelogram syntax unreliable)
}

# Relationship arrow styles for Mermaid
# Keyed by Edge.relationship; unknown relationships fall back to "-->" at the call site.
EDGE_STYLES: dict[str, str] = {
    "caused_by": "-->",  # Standard arrow
    "reverts": "-. reverts .->",  # Dotted with label
    "related_to": "---",  # Line without arrow
    "learned_from": "-.->",  # Dotted arrow
}

# ASCII box characters
# Unicode box-drawing characters keyed by position: tl/tr/bl/br = corners,
# h/v = horizontal/vertical bars, t/b/l/r = tee junctions, c = cross.
ASCII_BOX: dict[str, str] = {
    "tl": "┌",
    "tr": "┐",
    "bl": "└",
    "br": "┘",
    "h": "─",
    "v": "│",
    "t": "┬",
    "b": "┴",
    "l": "├",
    "r": "┤",
    "c": "┼",
}

# Simple ASCII (fallback)
# Same keys as ASCII_BOX, drawn with plain +/-/| for terminals without Unicode.
ASCII_BOX_SIMPLE: dict[str, str] = {
    "tl": "+",
    "tr": "+",
    "bl": "+",
    "br": "+",
    "h": "-",
    "v": "|",
    "t": "+",
    "b": "+",
    "l": "+",
    "r": "+",
    "c": "+",
}
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# ---- Data Classes --------------------------------------------------------------------------------------------
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@dataclass
class GraphData:
    """Container for graph visualization data.

    Attributes:
        events: List of events to visualize.
        edges: List of edges connecting events.
        annotations: Dictionary mapping event IDs to their annotations.
    """

    events: list[Event]
    edges: list[Edge]
    annotations: dict[str, list[Annotation]]

    @classmethod
    def from_context_store(
        cls,
        store: ContextStore,
        limit: int = 50,
        event_types: list[str] | None = None,
    ) -> GraphData:
        """Build graph data from context store.

        Args:
            store: Context store to query.
            limit: Maximum events to include.
            event_types: Filter by event types.

        Returns:
            GraphData instance with events, edges, and annotations.
        """
        # NOTE(review): reaches into the store's private connection; the store
        # apparently exposes no public API for raw event-row queries.
        conn = store._connection

        # Build a single parameterized query; the optional IN-list uses "?"
        # placeholders only, so caller-supplied type names never reach the SQL text.
        where_clause = ""
        params: list[Any] = []
        if event_types:
            placeholders = ",".join("?" for _ in event_types)
            where_clause = f"WHERE event_type IN ({placeholders})"
            params.extend(event_types)
        params.append(limit)

        cursor = conn.execute(
            f"""
            SELECT * FROM events
            {where_clause}
            ORDER BY timestamp DESC
            LIMIT ?
            """,
            params,
        )
        events = [Event.from_row(row) for row in cursor.fetchall()]
        event_ids = {e.id for e in events}

        # Only keep edges whose BOTH endpoints are in the selected event set.
        edges: list[Edge] = []
        if event_ids:
            placeholders = ",".join("?" for _ in event_ids)
            cursor = conn.execute(
                f"""
                SELECT * FROM edges
                WHERE source_id IN ({placeholders})
                AND target_id IN ({placeholders})
                """,
                [*event_ids, *event_ids],
            )
            edges = [Edge.from_row(row) for row in cursor.fetchall()]

        # One annotation lookup per selected event.
        annotations = {event.id: store.get_annotations(event.id) for event in events}

        return cls(events=events, edges=edges, annotations=annotations)
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
# ---- Mermaid Generation --------------------------------------------------------------------------------------
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _sanitize_mermaid_text(text: str) -> str:
|
|
181
|
+
"""Sanitize text for Mermaid diagram inclusion.
|
|
182
|
+
|
|
183
|
+
Args:
|
|
184
|
+
text: Text to sanitize.
|
|
185
|
+
|
|
186
|
+
Returns:
|
|
187
|
+
Sanitized text safe for Mermaid.
|
|
188
|
+
"""
|
|
189
|
+
# Replace problematic characters
|
|
190
|
+
text = text.replace('"', "'")
|
|
191
|
+
text = text.replace("\n", " ")
|
|
192
|
+
text = text.replace("[", "(")
|
|
193
|
+
text = text.replace("]", ")")
|
|
194
|
+
text = text.replace("{", "(")
|
|
195
|
+
text = text.replace("}", ")")
|
|
196
|
+
text = text.replace("<", "‹")
|
|
197
|
+
text = text.replace(">", "›")
|
|
198
|
+
text = text.replace("#", "")
|
|
199
|
+
# Truncate long text
|
|
200
|
+
if len(text) > 40:
|
|
201
|
+
text = text[:37] + "..."
|
|
202
|
+
return text
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def _format_event_label(event: Event, show_time: bool = True) -> str:
|
|
206
|
+
"""Format event label for display.
|
|
207
|
+
|
|
208
|
+
Args:
|
|
209
|
+
event: Event to format.
|
|
210
|
+
show_time: Include timestamp in label.
|
|
211
|
+
|
|
212
|
+
Returns:
|
|
213
|
+
Formatted label string.
|
|
214
|
+
"""
|
|
215
|
+
# Parse timestamp for display
|
|
216
|
+
try:
|
|
217
|
+
dt = datetime.fromisoformat(event.timestamp)
|
|
218
|
+
time_str = dt.strftime("%m/%d %H:%M")
|
|
219
|
+
except (ValueError, TypeError):
|
|
220
|
+
time_str = event.timestamp[:16] if event.timestamp else ""
|
|
221
|
+
|
|
222
|
+
# Get short ref
|
|
223
|
+
ref_str = ""
|
|
224
|
+
if event.ref:
|
|
225
|
+
ref_str = event.ref[:8] if len(event.ref) > 8 else event.ref
|
|
226
|
+
|
|
227
|
+
# Build label parts
|
|
228
|
+
parts = [event.event_type.upper()]
|
|
229
|
+
if ref_str:
|
|
230
|
+
parts.append(ref_str)
|
|
231
|
+
if show_time:
|
|
232
|
+
parts.append(time_str)
|
|
233
|
+
|
|
234
|
+
return " | ".join(parts)
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def generate_mermaid_flowchart(
    data: GraphData,
    direction: str = "TB",
    show_annotations: bool = True,
    title: str | None = None,
) -> str:
    """Generate Mermaid flowchart from graph data.

    Emits nodes for every (non-duplicate) event, explicit edges from the
    store, inferred chronological links within each branch, fork/join links
    around branch and merge events, optional annotation nodes, and classDef
    styling — in that order.

    Args:
        data: Graph data to visualize.
        direction: Graph direction (TB, BT, LR, RL).
        show_annotations: Include annotation nodes.
        title: Optional chart title.

    Returns:
        Mermaid flowchart diagram string.
    """
    lines: list[str] = []

    # Header with direction
    lines.append(f"flowchart {direction}")

    # Add title as comment if provided (inserted BEFORE the flowchart header)
    if title:
        lines.insert(0, f"%% {title}")

    # Track merge commits for special handling
    merge_commits: set[str] = set()
    # Track commit refs that have merge events (to deduplicate)
    merge_commit_refs: set[str] = set()
    # Events to skip (duplicates)
    skip_events: set[str] = set()
    # Map merge commit IDs to their source branch (from merge events)
    merge_source_branches: dict[str, str] = {}  # commit_id[:8] -> source_branch

    # First pass: identify merge events and their associated commits
    # If a merge event has a commit_id, we'll skip it if there's a matching commit
    for event in data.events:
        if event.event_type == "merge":
            payload = event.payload or {}
            commit_id = payload.get("commit_id")
            source_branch = payload.get("source_branch")
            if commit_id:
                merge_commit_refs.add(commit_id[:8])
                if source_branch:
                    merge_source_branches[commit_id[:8]] = source_branch

    # Check for duplicate merge events (skip merge event if commit exists)
    for event in data.events:
        if event.event_type == "merge":
            payload = event.payload or {}
            commit_id = payload.get("commit_id")
            if commit_id:
                # Check if there's a commit event with matching ref
                for other in data.events:
                    if other.event_type == "commit" and other.ref:
                        if other.ref.startswith(commit_id[:8]):
                            # Skip this merge event, keep the commit
                            skip_events.add(event.id)
                            break

    # Add event nodes
    for event in data.events:
        # Skip duplicate events
        if event.id in skip_events:
            continue

        node_id = f"e_{event.id[:8]}"
        label = _sanitize_mermaid_text(_format_event_label(event))
        payload = event.payload or {}

        # Check if this is a merge commit:
        # 1. Has parent2 set, OR
        # 2. Message starts with "Merge", OR
        # 3. It's a merge event type
        is_merge_commit = False
        if event.event_type == "commit":
            message = payload.get("message", "")
            has_parent2 = payload.get("parent2") is not None
            is_merge_message = message.lower().startswith("merge")
            is_merge_commit = has_parent2 or is_merge_message
        elif event.event_type == "merge":
            is_merge_commit = True

        if is_merge_commit:
            merge_commits.add(event.id)
            # Use merge shape for merge commits
            shape_l, shape_r = EVENT_SHAPES.get("merge", ("{{", "}}"))
            # Update label to indicate it's a merge
            if "COMMIT" in label:
                label = label.replace("COMMIT", "MERGE")
        else:
            # Get shape for event type
            shape_l, shape_r = EVENT_SHAPES.get(event.event_type, ("[", "]"))

        lines.append(f" {node_id}{shape_l}\"{label}\"{shape_r}")

    # Track which node pairs are connected by explicit edges
    connected_pairs: set[tuple[str, str]] = set()

    # Add explicit edge connections
    for edge in data.edges:
        source_id = f"e_{edge.source_id[:8]}"
        target_id = f"e_{edge.target_id[:8]}"
        arrow = EDGE_STYLES.get(edge.relationship, "-->")

        # Check if both nodes exist
        source_exists = any(e.id == edge.source_id for e in data.events)
        target_exists = any(e.id == edge.target_id for e in data.events)

        if source_exists and target_exists:
            connected_pairs.add((source_id, target_id))
            connected_pairs.add((target_id, source_id))  # Mark both directions
            if edge.relationship in ("reverts", "learned_from"):
                lines.append(f" {source_id} {arrow} {target_id}")
            else:
                lines.append(f" {source_id} --> |{edge.relationship}| {target_id}")

    # Sort events oldest first for chronological flow (excluding skipped duplicates)
    sorted_events = sorted(
        [e for e in data.events if e.id not in skip_events],
        key=lambda e: e.timestamp
    )
    branch_events = [e for e in sorted_events if e.event_type == "branch"]
    commit_events = [e for e in sorted_events if e.event_type == "commit"]
    non_branch_events = [e for e in sorted_events if e.event_type != "branch"]

    # Group commits by branch for proper parallel visualization
    commits_by_branch: dict[str, list[Event]] = {}
    commits_without_branch: list[Event] = []

    for event in non_branch_events:
        payload = event.payload or {}
        branch_name = payload.get("branch")
        if branch_name:
            if branch_name not in commits_by_branch:
                commits_by_branch[branch_name] = []
            commits_by_branch[branch_name].append(event)
        else:
            commits_without_branch.append(event)

    # Link events within the same branch chronologically
    for branch_name, events in commits_by_branch.items():
        sorted_branch_events = sorted(events, key=lambda e: e.timestamp)
        for i in range(len(sorted_branch_events) - 1):
            current_event = sorted_branch_events[i]
            next_event = sorted_branch_events[i + 1]
            current_id = f"e_{current_event.id[:8]}"
            next_id = f"e_{next_event.id[:8]}"
            if (current_id, next_id) not in connected_pairs:
                lines.append(f" {current_id} --> {next_id}")

    # For events without branch info, link them chronologically (legacy support)
    for i in range(len(commits_without_branch) - 1):
        current_event = commits_without_branch[i]
        next_event = commits_without_branch[i + 1]
        current_id = f"e_{current_event.id[:8]}"
        next_id = f"e_{next_event.id[:8]}"
        if (current_id, next_id) not in connected_pairs:
            lines.append(f" {current_id} --> {next_id}")

    # Link branch events properly
    # 1. If branch has a source commit, link FROM that commit (fork point)
    # 2. If branch has no source commit (initial branch), link TO the first commit on that branch
    for branch_event in branch_events:
        branch_id = f"e_{branch_event.id[:8]}"
        payload = branch_event.payload or {}
        source_commit = payload.get("commit_id")
        branch_name = payload.get("branch_name")

        if source_commit:
            # Branch was created from a specific commit - link FROM that commit
            for commit in commit_events:
                if commit.ref and commit.ref.startswith(source_commit[:8]):
                    commit_id = f"e_{commit.id[:8]}"
                    if (commit_id, branch_id) not in connected_pairs:
                        lines.append(f" {commit_id} -.-> {branch_id}")
                    break

        # Link branch to first commit ON that branch
        if branch_name and branch_name in commits_by_branch:
            first_commit_on_branch = commits_by_branch[branch_name][0]
            first_commit_id = f"e_{first_commit_on_branch.id[:8]}"
            if (branch_id, first_commit_id) not in connected_pairs:
                lines.append(f" {branch_id} --> {first_commit_id}")
        elif not source_commit:
            # Initial branch without branch tracking - link to first commit after it
            for commit in commit_events:
                if commit.timestamp > branch_event.timestamp:
                    commit_id = f"e_{commit.id[:8]}"
                    if (branch_id, commit_id) not in connected_pairs:
                        lines.append(f" {branch_id} --> {commit_id}")
                    break

    # Connect merge commits to BOTH parent branches
    # This shows where branches rejoin
    for event in data.events:
        if event.id in merge_commits:
            payload = event.payload or {}
            merge_id = f"e_{event.id[:8]}"

            if event.event_type == "commit":
                # For commit events, use parent and parent2
                parent1 = payload.get("parent")
                parent2 = payload.get("parent2")

                # Find parent1 commit event and connect
                if parent1:
                    for commit in commit_events:
                        if commit.ref and commit.ref.startswith(parent1[:8]):
                            parent1_id = f"e_{commit.id[:8]}"
                            if (parent1_id, merge_id) not in connected_pairs:
                                lines.append(f" {parent1_id} --> {merge_id}")
                                connected_pairs.add((parent1_id, merge_id))
                            break

                # Find parent2 commit event and connect (the merged-in branch)
                if parent2:
                    for commit in commit_events:
                        if commit.ref and commit.ref.startswith(parent2[:8]):
                            parent2_id = f"e_{commit.id[:8]}"
                            if (parent2_id, merge_id) not in connected_pairs:
                                lines.append(f" {parent2_id} --> {merge_id}")
                                connected_pairs.add((parent2_id, merge_id))
                            break

                # If no parent2 but we have source_branch info from merge event
                if not parent2 and event.ref:
                    source_branch = merge_source_branches.get(event.ref[:8])
                    if source_branch and source_branch in commits_by_branch:
                        source_commits = commits_by_branch[source_branch]
                        if source_commits:
                            last_source = source_commits[-1]
                            source_id = f"e_{last_source.id[:8]}"
                            if (source_id, merge_id) not in connected_pairs:
                                lines.append(f" {source_id} --> {merge_id}")
                                connected_pairs.add((source_id, merge_id))

            elif event.event_type == "merge":
                # For merge events, use source_branch and target_branch
                source_branch = payload.get("source_branch")
                target_branch = payload.get("target_branch")

                # Find the last commit on source branch and connect to merge
                if source_branch and source_branch in commits_by_branch:
                    source_commits = commits_by_branch[source_branch]
                    if source_commits:
                        last_source = source_commits[-1]  # Last commit on source branch
                        source_id = f"e_{last_source.id[:8]}"
                        if (source_id, merge_id) not in connected_pairs:
                            lines.append(f" {source_id} --> {merge_id}")
                            connected_pairs.add((source_id, merge_id))

                # Find the last commit on target branch before merge and connect
                if target_branch and target_branch in commits_by_branch:
                    target_commits = [c for c in commits_by_branch[target_branch]
                                      if c.timestamp < event.timestamp]
                    if target_commits:
                        last_target = target_commits[-1]
                        target_id = f"e_{last_target.id[:8]}"
                        if (target_id, merge_id) not in connected_pairs:
                            lines.append(f" {target_id} --> {merge_id}")
                            connected_pairs.add((target_id, merge_id))

    # Add annotation nodes when enabled
    annotation_node_ids: list[str] = []
    if show_annotations and data.annotations:
        lines.append("")
        lines.append(" %% Annotations")
        for event_id, annotations in data.annotations.items():
            event_node_id = f"e_{event_id[:8]}"
            for ann in annotations:
                ann_node_id = f"a_{ann.id[:8]}"
                annotation_node_ids.append(ann_node_id)
                # Use note shape for annotations (asymmetric)
                ann_label = _sanitize_mermaid_text(
                    f"{ann.annotation_type.upper()}: {ann.content}"
                )
                # NOTE(review): this emits `id>{"label"}` — standard Mermaid
                # asymmetric-node syntax is `id>"label"]`; verify this renders.
                lines.append(f" {ann_node_id}>{{\"{ann_label}\"}}")
                # Connect annotation to its event with dashed line
                lines.append(f" {ann_node_id} -.-> {event_node_id}")

    # Add styling
    lines.append("")
    lines.append(" %% Styling")
    lines.append(" classDef commit fill:#4CAF50,color:#fff")
    lines.append(" classDef push fill:#2196F3,color:#fff")
    lines.append(" classDef pull fill:#9C27B0,color:#fff")
    lines.append(" classDef merge fill:#FF9800,color:#fff")
    lines.append(" classDef branch fill:#00BCD4,color:#fff")
    lines.append(" classDef annotation fill:#FFF9C4,color:#333")

    # Apply styles to nodes (skip duplicates)
    for event in data.events:
        if event.id in skip_events:
            continue
        node_id = f"e_{event.id[:8]}"
        if event.id in merge_commits:
            # Merge commits get merge styling (orange hexagon)
            lines.append(f" class {node_id} merge")
        elif event.event_type in ("commit", "push", "pull", "merge", "branch"):
            lines.append(f" class {node_id} {event.event_type}")

    # Apply annotation styling
    for ann_node_id in annotation_node_ids:
        lines.append(f" class {ann_node_id} annotation")

    return "\n".join(lines)
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
def generate_mermaid_timeline(
    data: GraphData,
    title: str = "Context Timeline",
) -> str:
    """Generate Mermaid timeline diagram from graph data.

    Events are bucketed into one timeline section per calendar day (ordered
    by date), each entry rendered as "HH:MM - type ref" where available.

    Args:
        data: Graph data to visualize.
        title: Timeline title.

    Returns:
        Mermaid timeline diagram string.
    """

    def _day_of(ev: Event) -> str:
        # Bucket key: ISO date, or "Unknown" when the timestamp doesn't parse.
        try:
            return datetime.fromisoformat(ev.timestamp).strftime("%Y-%m-%d")
        except (ValueError, TypeError):
            return "Unknown"

    def _entry_for(ev: Event) -> str:
        # One timeline entry: optional clock prefix, event type, short ref.
        try:
            clock = datetime.fromisoformat(ev.timestamp).strftime("%H:%M")
        except (ValueError, TypeError):
            clock = ""
        short_ref = ev.ref[:8] if ev.ref and len(ev.ref) > 8 else (ev.ref or "")
        text = f"{ev.event_type}"
        if short_ref:
            text += f" {short_ref}"
        if clock:
            text = f"{clock} - {text}"
        return text

    # Group events by day, preserving chronological order within each bucket.
    buckets: dict[str, list[Event]] = {}
    for ev in sorted(data.events, key=lambda e: e.timestamp):
        buckets.setdefault(_day_of(ev), []).append(ev)

    out = ["timeline", f" title {title}"]
    for day in sorted(buckets):
        out.append(f" section {day}")
        out.extend(f" {_sanitize_mermaid_text(_entry_for(ev))}" for ev in buckets[day])

    return "\n".join(out)
|
|
599
|
+
|
|
600
|
+
|
|
601
|
+
def generate_mermaid_git_graph(
    data: GraphData,
    branch_name: str = "main",
) -> str:
    """Generate Mermaid gitGraph diagram from commit/merge/branch events.

    This shows the git history with proper branch topology including:
    - Commits on branches
    - Branch creation points
    - Merge points where branches join

    Args:
        data: Graph data to visualize.
        branch_name: Name for the main branch.

    Returns:
        Mermaid gitGraph diagram string.
    """
    lines = []
    lines.append("gitGraph")

    # Only commit/merge/branch/lsm events participate. Concatenating by type
    # BEFORE sorting keeps a deterministic commit -> merge -> branch -> lsm
    # order for events sharing a timestamp (list.sort is stable).
    commits = [e for e in data.events if e.event_type == "commit"]
    merges = [e for e in data.events if e.event_type == "merge"]
    branches = [e for e in data.events if e.event_type == "branch"]
    lsms = [e for e in data.events if e.event_type == "lsm"]
    all_events = commits + merges + branches + lsms
    all_events.sort(key=lambda e: e.timestamp)

    # Branch the generated script is currently "checked out" on.
    # (The previous first-pass loop only populated structures that were
    # never read — active_branches, parent_to_children, commit_to_branch —
    # so it has been removed.)
    current_branch = branch_name

    for event in all_events:
        payload = event.payload or {}

        if event.event_type == "branch":
            action = payload.get("action", "")
            br_name = payload.get("branch_name", "")
            # Only creation of non-main branches emits a gitGraph command;
            # Mermaid implicitly checks out a newly created branch.
            if action == "create" and br_name and br_name != branch_name:
                lines.append(f" branch {_sanitize_branch_name(br_name)}")
                current_branch = br_name

        elif event.event_type == "commit":
            msg = payload.get("message", "")
            if not msg:
                msg = f"Commit {event.ref[:8] if event.ref else event.id[:8]}"
            msg = _sanitize_mermaid_text(msg)
            commit_id = (event.ref[:8] if event.ref else event.id[:8])

            # Merge commits (those with a second parent) are highlighted.
            parent2 = payload.get("parent2")
            if parent2:
                lines.append(f' commit id: "{commit_id}" msg: "{msg}" type: HIGHLIGHT')
            else:
                lines.append(f' commit id: "{commit_id}" msg: "{msg}"')

        elif event.event_type == "merge":
            source_branch = payload.get("source_branch", "feature")
            target_branch = payload.get("target_branch", branch_name)

            # Checkout the merge target before emitting the merge command.
            if target_branch != current_branch:
                lines.append(f" checkout {_sanitize_branch_name(target_branch)}")
                current_branch = target_branch

            lines.append(f" merge {_sanitize_branch_name(source_branch)}")

        elif event.event_type == "lsm":
            source = payload.get("source", "source")
            transferred = payload.get("transferred_count", 0)
            msg = f"LSM from {source} ({transferred} transferred)"
            msg = _sanitize_mermaid_text(msg)
            # LSM transfers are rendered as REVERSE-type marker commits.
            lines.append(f' commit id: "lsm-{event.id[:6]}" msg: "{msg}" type: REVERSE')

    # An empty graph still needs at least one commit to render.
    if not all_events:
        lines.append(' commit id: "initial" msg: "No commits yet"')

    return "\n".join(lines)
|
|
707
|
+
|
|
708
|
+
|
|
709
|
+
def _sanitize_branch_name(name: str) -> str:
|
|
710
|
+
"""Sanitize branch name for Mermaid gitGraph.
|
|
711
|
+
|
|
712
|
+
Args:
|
|
713
|
+
name: Branch name to sanitize.
|
|
714
|
+
|
|
715
|
+
Returns:
|
|
716
|
+
Sanitized branch name (alphanumeric and dashes only).
|
|
717
|
+
"""
|
|
718
|
+
# Replace slashes and other special chars with dashes
|
|
719
|
+
sanitized = re.sub(r"[^a-zA-Z0-9-]", "-", name)
|
|
720
|
+
# Remove consecutive dashes
|
|
721
|
+
sanitized = re.sub(r"-+", "-", sanitized)
|
|
722
|
+
# Remove leading/trailing dashes
|
|
723
|
+
return sanitized.strip("-") or "branch"
|
|
724
|
+
|
|
725
|
+
|
|
726
|
+
# ---- ASCII Art Generation ------------------------------------------------------------------------------------
|
|
727
|
+
|
|
728
|
+
|
|
729
|
+
def _wrap_text(text: str, width: int) -> list[str]:
|
|
730
|
+
"""Wrap text to specified width.
|
|
731
|
+
|
|
732
|
+
Args:
|
|
733
|
+
text: Text to wrap.
|
|
734
|
+
width: Maximum line width.
|
|
735
|
+
|
|
736
|
+
Returns:
|
|
737
|
+
List of wrapped lines.
|
|
738
|
+
"""
|
|
739
|
+
words = text.split()
|
|
740
|
+
lines = []
|
|
741
|
+
current_line: list[str] = []
|
|
742
|
+
current_length = 0
|
|
743
|
+
|
|
744
|
+
for word in words:
|
|
745
|
+
if current_length + len(word) + 1 <= width:
|
|
746
|
+
current_line.append(word)
|
|
747
|
+
current_length += len(word) + 1
|
|
748
|
+
else:
|
|
749
|
+
if current_line:
|
|
750
|
+
lines.append(" ".join(current_line))
|
|
751
|
+
current_line = [word]
|
|
752
|
+
current_length = len(word)
|
|
753
|
+
|
|
754
|
+
if current_line:
|
|
755
|
+
lines.append(" ".join(current_line))
|
|
756
|
+
|
|
757
|
+
return lines or [""]
|
|
758
|
+
|
|
759
|
+
|
|
760
|
+
def generate_ascii_timeline(
    data: GraphData,
    width: int = 60,
    use_unicode: bool = True,
) -> str:
    """Generate ASCII art timeline from graph data.

    Renders events newest-first as a single vertical stack of boxed
    cards. Each card shows the timestamp, a TYPE [ref] by actor
    header, up to two annotations, and the payload message if any.

    Args:
        data: Graph data to visualize.
        width: Maximum width of output.
        use_unicode: Use Unicode box-drawing characters.

    Returns:
        ASCII art timeline string.
    """
    # Pick the box-drawing glyph table (module-level constants).
    box = ASCII_BOX if use_unicode else ASCII_BOX_SIMPLE
    lines: list[str] = []

    # Sort events by timestamp
    # (newest first; assumes ISO-8601 timestamps so string order is
    # chronological — TODO confirm against event producers)
    events = sorted(data.events, key=lambda e: e.timestamp, reverse=True)

    content_width = width - 4  # Account for box borders

    for i, event in enumerate(events):
        # Parse timestamp; fall back to a raw 16-char prefix (or
        # "Unknown") when it is not ISO format.
        try:
            dt = datetime.fromisoformat(event.timestamp)
            time_str = dt.strftime("%Y-%m-%d %H:%M")
        except (ValueError, TypeError):
            time_str = event.timestamp[:16] if event.timestamp else "Unknown"

        # Build header
        event_type = event.event_type.upper()
        ref_str = ""
        if event.ref:
            # Truncate long refs to keep the header compact.
            ref_str = event.ref[:12] if len(event.ref) > 12 else event.ref

        header = f"{event_type}"
        if ref_str:
            header += f" [{ref_str}]"

        # Add actor if present
        if event.actor:
            header += f" by {event.actor}"

        # Top border
        # (first card gets corner glyphs; later cards share a divider
        # with the card above via the left/right junction glyphs)
        if i == 0:
            lines.append(box["tl"] + box["h"] * (width - 2) + box["tr"])
        else:
            lines.append(box["l"] + box["h"] * (width - 2) + box["r"])

        # Time line
        time_line = f" {time_str}"
        time_line = time_line.ljust(content_width)
        lines.append(f"{box['v']} {time_line} {box['v']}")

        # Header line (wrapped so long actor/ref text stays inside the box)
        header_lines = _wrap_text(header, content_width)
        for hline in header_lines:
            lines.append(f"{box['v']} {hline.ljust(content_width)} {box['v']}")

        # Separator
        lines.append(f"{box['v']} {'-' * content_width} {box['v']}")

        # Annotations
        # NOTE(review): assumes data.annotations maps event id -> list of
        # annotation objects with .annotation_type/.content attributes.
        if event.id in data.annotations:
            for ann in data.annotations[event.id][:2]:  # Max 2 annotations
                ann_prefix = f"[{ann.annotation_type[:3].upper()}] "
                ann_text = ann_prefix + ann.content
                ann_lines = _wrap_text(ann_text, content_width)
                for aline in ann_lines:
                    lines.append(f"{box['v']} {aline.ljust(content_width)} {box['v']}")

        # Payload summary (if has message)
        msg = event.payload.get("message", "")
        if msg:
            msg_lines = _wrap_text(f'"{msg}"', content_width)
            for mline in msg_lines:
                lines.append(f"{box['v']} {mline.ljust(content_width)} {box['v']}")

    # Bottom border
    if events:
        lines.append(box["bl"] + box["h"] * (width - 2) + box["br"])
    else:
        lines.append("(No events to display)")

    return "\n".join(lines)
|
|
847
|
+
|
|
848
|
+
|
|
849
|
+
def generate_ascii_graph(
|
|
850
|
+
data: GraphData,
|
|
851
|
+
width: int = 80,
|
|
852
|
+
use_unicode: bool = True,
|
|
853
|
+
) -> str:
|
|
854
|
+
"""Generate ASCII art graph showing relationships.
|
|
855
|
+
|
|
856
|
+
Args:
|
|
857
|
+
data: Graph data to visualize.
|
|
858
|
+
width: Maximum width of output.
|
|
859
|
+
use_unicode: Use Unicode characters.
|
|
860
|
+
|
|
861
|
+
Returns:
|
|
862
|
+
ASCII art graph string.
|
|
863
|
+
"""
|
|
864
|
+
lines = []
|
|
865
|
+
node_char = "●" if use_unicode else "*"
|
|
866
|
+
arrow_r = "→" if use_unicode else "->"
|
|
867
|
+
arrow_d = "↓" if use_unicode else "v"
|
|
868
|
+
|
|
869
|
+
# Sort events by timestamp
|
|
870
|
+
events = sorted(data.events, key=lambda e: e.timestamp)
|
|
871
|
+
|
|
872
|
+
# Build node lookup
|
|
873
|
+
node_positions: dict[str, int] = {}
|
|
874
|
+
for i, event in enumerate(events):
|
|
875
|
+
node_positions[event.id] = i
|
|
876
|
+
|
|
877
|
+
# Generate node lines
|
|
878
|
+
for i, event in enumerate(events):
|
|
879
|
+
# Parse timestamp
|
|
880
|
+
try:
|
|
881
|
+
dt = datetime.fromisoformat(event.timestamp)
|
|
882
|
+
time_str = dt.strftime("%m/%d %H:%M")
|
|
883
|
+
except (ValueError, TypeError):
|
|
884
|
+
time_str = ""
|
|
885
|
+
|
|
886
|
+
# Build node line
|
|
887
|
+
type_str = event.event_type[:6].ljust(6)
|
|
888
|
+
ref_str = (event.ref[:8] if event.ref else event.id[:8]).ljust(8)
|
|
889
|
+
|
|
890
|
+
node_line = f" {node_char} {type_str} {ref_str} {time_str}"
|
|
891
|
+
|
|
892
|
+
# Find outgoing edges
|
|
893
|
+
outgoing = [e for e in data.edges if e.source_id == event.id]
|
|
894
|
+
if outgoing:
|
|
895
|
+
edge_info = []
|
|
896
|
+
for edge in outgoing[:2]: # Max 2 edges shown
|
|
897
|
+
target_pos = node_positions.get(edge.target_id)
|
|
898
|
+
if target_pos is not None:
|
|
899
|
+
edge_info.append(f"{arrow_r} {edge.relationship}")
|
|
900
|
+
if edge_info:
|
|
901
|
+
node_line += f" ({', '.join(edge_info)})"
|
|
902
|
+
|
|
903
|
+
lines.append(node_line)
|
|
904
|
+
|
|
905
|
+
# Add connector to next node
|
|
906
|
+
if i < len(events) - 1:
|
|
907
|
+
lines.append(f" {arrow_d}")
|
|
908
|
+
|
|
909
|
+
if not events:
|
|
910
|
+
lines.append("(No events to display)")
|
|
911
|
+
|
|
912
|
+
# Add legend
|
|
913
|
+
lines.append("")
|
|
914
|
+
lines.append("Legend:")
|
|
915
|
+
lines.append(f" {node_char} = Event node")
|
|
916
|
+
lines.append(f" {arrow_r} = Relationship")
|
|
917
|
+
|
|
918
|
+
return "\n".join(lines)
|
|
919
|
+
|
|
920
|
+
|
|
921
|
+
# ---- HTML Generation -----------------------------------------------------------------------------------------
|
|
922
|
+
|
|
923
|
+
|
|
924
|
+
def generate_html_visualization(
    data: GraphData,
    title: str = "Context Graph",
    theme: str = "light",
) -> str:
    """Generate standalone HTML file with interactive visualization.

    Uses embedded Mermaid.js (loaded from a CDN) for rendering. Can be
    opened directly in IDE preview panes or browsers.

    Fix: the tab-switching script previously read the implicit global
    ``event`` (legacy ``window.event``), which is deprecated and not
    reliable across browsers; the click event is now passed explicitly
    from the inline handlers.

    Args:
        data: Graph data to visualize.
        title: Page title.
        theme: Color theme ('light' or 'dark').

    Returns:
        Complete HTML document string.
    """
    # Generate Mermaid diagram sources for both views.
    mermaid_flowchart = generate_mermaid_flowchart(data, show_annotations=True)
    mermaid_timeline = generate_mermaid_timeline(data)

    # Escape for HTML embedding (Mermaid reads the element text content,
    # so escaped entities render correctly and cannot inject markup).
    mermaid_flowchart_escaped = html.escape(mermaid_flowchart)
    mermaid_timeline_escaped = html.escape(mermaid_timeline)

    # Theme-based colors; anything other than 'dark' falls back to light.
    if theme == "dark":
        bg_color = "#1e1e1e"
        text_color = "#d4d4d4"
        card_bg = "#252526"
        border_color = "#3c3c3c"
        mermaid_theme = "dark"
    else:
        bg_color = "#ffffff"
        text_color = "#333333"
        card_bg = "#f5f5f5"
        border_color = "#e0e0e0"
        mermaid_theme = "default"

    html_template = f"""<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{html.escape(title)}</title>
    <script src="https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.min.js"></script>
    <style>
        * {{
            box-sizing: border-box;
            margin: 0;
            padding: 0;
        }}
        body {{
            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
            background-color: {bg_color};
            color: {text_color};
            padding: 20px;
            line-height: 1.6;
        }}
        h1, h2 {{
            margin-bottom: 16px;
        }}
        .container {{
            max-width: 1200px;
            margin: 0 auto;
        }}
        .card {{
            background: {card_bg};
            border: 1px solid {border_color};
            border-radius: 8px;
            padding: 20px;
            margin-bottom: 20px;
        }}
        .mermaid {{
            text-align: center;
        }}
        .tabs {{
            display: flex;
            gap: 8px;
            margin-bottom: 16px;
        }}
        .tab {{
            padding: 8px 16px;
            border: 1px solid {border_color};
            border-radius: 4px;
            cursor: pointer;
            background: transparent;
            color: {text_color};
        }}
        .tab.active {{
            background: #4CAF50;
            color: white;
            border-color: #4CAF50;
        }}
        .tab-content {{
            display: none;
        }}
        .tab-content.active {{
            display: block;
        }}
        .stats {{
            display: grid;
            grid-template-columns: repeat(auto-fit, minmax(120px, 1fr));
            gap: 16px;
            margin-bottom: 20px;
        }}
        .stat {{
            text-align: center;
            padding: 16px;
            background: {card_bg};
            border-radius: 8px;
            border: 1px solid {border_color};
        }}
        .stat-value {{
            font-size: 2em;
            font-weight: bold;
            color: #4CAF50;
        }}
        .stat-label {{
            font-size: 0.9em;
            opacity: 0.8;
        }}
        pre {{
            background: {card_bg};
            padding: 16px;
            border-radius: 4px;
            overflow-x: auto;
            font-size: 0.85em;
        }}
    </style>
</head>
<body>
    <div class="container">
        <h1>{html.escape(title)}</h1>

        <div class="stats">
            <div class="stat">
                <div class="stat-value">{len(data.events)}</div>
                <div class="stat-label">Events</div>
            </div>
            <div class="stat">
                <div class="stat-value">{len(data.edges)}</div>
                <div class="stat-label">Relationships</div>
            </div>
            <div class="stat">
                <div class="stat-value">{sum(len(v) for v in data.annotations.values())}</div>
                <div class="stat-label">Annotations</div>
            </div>
        </div>

        <div class="tabs">
            <button class="tab active" onclick="showTab('flowchart', event)">Flowchart</button>
            <button class="tab" onclick="showTab('timeline', event)">Timeline</button>
            <button class="tab" onclick="showTab('source', event)">Source</button>
        </div>

        <div id="flowchart" class="tab-content active">
            <div class="card">
                <h2>Event Graph</h2>
                <pre class="mermaid">
{mermaid_flowchart_escaped}
                </pre>
            </div>
        </div>

        <div id="timeline" class="tab-content">
            <div class="card">
                <h2>Timeline View</h2>
                <pre class="mermaid">
{mermaid_timeline_escaped}
                </pre>
            </div>
        </div>

        <div id="source" class="tab-content">
            <div class="card">
                <h2>Mermaid Source</h2>
                <h3>Flowchart</h3>
                <pre>{mermaid_flowchart_escaped}</pre>
                <h3 style="margin-top: 16px;">Timeline</h3>
                <pre>{mermaid_timeline_escaped}</pre>
            </div>
        </div>
    </div>

    <script>
        mermaid.initialize({{
            startOnLoad: true,
            theme: '{mermaid_theme}',
            flowchart: {{
                useMaxWidth: true,
                htmlLabels: true
            }}
        }});

        function showTab(tabId, evt) {{
            // Hide all tab contents
            document.querySelectorAll('.tab-content').forEach(el => {{
                el.classList.remove('active');
            }});
            document.querySelectorAll('.tab').forEach(el => {{
                el.classList.remove('active');
            }});

            // Show selected tab; use the explicitly passed click event
            // (the implicit global 'event' is deprecated).
            document.getElementById(tabId).classList.add('active');
            evt.currentTarget.classList.add('active');

            // Re-render mermaid diagrams
            mermaid.contentLoaded();
        }}
    </script>
</body>
</html>"""

    return html_template
|
|
1141
|
+
|
|
1142
|
+
|
|
1143
|
+
# ---- Convenience Functions -----------------------------------------------------------------------------------
|
|
1144
|
+
|
|
1145
|
+
|
|
1146
|
+
def visualize_context(
    store: ContextStore,
    output_format: str = "mermaid",
    limit: int = 50,
    event_types: list[str] | None = None,
    **kwargs: Any,
) -> str:
    """Generate visualization from context store.

    Main entry point for visualization generation.

    Args:
        store: Context store to visualize.
        output_format: Output format ('mermaid', 'mermaid-timeline',
            'mermaid-git', 'ascii', 'ascii-graph', 'html').
        limit: Maximum events to include.
        event_types: Filter by event types.
        **kwargs: Additional format-specific options.

    Returns:
        Visualization string in requested format.

    Raises:
        ValueError: If output_format is not recognized.
    """
    # Build graph data once; every renderer consumes the same snapshot.
    data = GraphData.from_context_store(
        store,
        limit=limit,
        event_types=event_types,
    )

    # Lazy dispatch table: the selected renderer runs only after lookup,
    # each closure pulling its own format-specific options from kwargs.
    renderers: dict[str, Any] = {
        "mermaid": lambda: generate_mermaid_flowchart(
            data,
            direction=kwargs.get("direction", "TB"),
            show_annotations=kwargs.get("show_annotations", True),
            title=kwargs.get("title"),
        ),
        "mermaid-timeline": lambda: generate_mermaid_timeline(
            data,
            title=kwargs.get("title", "Context Timeline"),
        ),
        "mermaid-git": lambda: generate_mermaid_git_graph(
            data,
            branch_name=kwargs.get("branch_name", "main"),
        ),
        "ascii": lambda: generate_ascii_timeline(
            data,
            width=kwargs.get("width", 60),
            use_unicode=kwargs.get("use_unicode", True),
        ),
        "ascii-graph": lambda: generate_ascii_graph(
            data,
            width=kwargs.get("width", 80),
            use_unicode=kwargs.get("use_unicode", True),
        ),
        "html": lambda: generate_html_visualization(
            data,
            title=kwargs.get("title", "Context Graph"),
            theme=kwargs.get("theme", "light"),
        ),
    }

    renderer = renderers.get(output_format)
    if renderer is None:
        msg = f"Unknown output format: {output_format}. "
        msg += "Supported: mermaid, mermaid-timeline, mermaid-git, ascii, ascii-graph, html"
        raise ValueError(msg)
    return renderer()
|