htmlgraph 0.20.9__py3-none-any.whl → 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. htmlgraph/__init__.py +1 -1
  2. htmlgraph/analytics/__init__.py +3 -1
  3. htmlgraph/analytics/cross_session.py +612 -0
  4. htmlgraph/archive/__init__.py +24 -0
  5. htmlgraph/archive/bloom.py +234 -0
  6. htmlgraph/archive/fts.py +297 -0
  7. htmlgraph/archive/manager.py +583 -0
  8. htmlgraph/archive/search.py +244 -0
  9. htmlgraph/cli.py +510 -0
  10. htmlgraph/converter.py +39 -0
  11. htmlgraph/docs/__init__.py +77 -0
  12. htmlgraph/docs/docs_version.py +55 -0
  13. htmlgraph/docs/metadata.py +93 -0
  14. htmlgraph/docs/migrations.py +232 -0
  15. htmlgraph/docs/template_engine.py +143 -0
  16. htmlgraph/docs/templates/_sections/cli_reference.md.j2 +52 -0
  17. htmlgraph/docs/templates/_sections/core_concepts.md.j2 +29 -0
  18. htmlgraph/docs/templates/_sections/sdk_basics.md.j2 +69 -0
  19. htmlgraph/docs/templates/base_agents.md.j2 +78 -0
  20. htmlgraph/docs/templates/example_user_override.md.j2 +47 -0
  21. htmlgraph/docs/version_check.py +161 -0
  22. htmlgraph/learning.py +121 -97
  23. htmlgraph/models.py +53 -1
  24. htmlgraph/sdk.py +4 -1
  25. {htmlgraph-0.20.9.dist-info → htmlgraph-0.22.0.dist-info}/METADATA +1 -1
  26. {htmlgraph-0.20.9.dist-info → htmlgraph-0.22.0.dist-info}/RECORD +33 -16
  27. {htmlgraph-0.20.9.data → htmlgraph-0.22.0.data}/data/htmlgraph/dashboard.html +0 -0
  28. {htmlgraph-0.20.9.data → htmlgraph-0.22.0.data}/data/htmlgraph/styles.css +0 -0
  29. {htmlgraph-0.20.9.data → htmlgraph-0.22.0.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
  30. {htmlgraph-0.20.9.data → htmlgraph-0.22.0.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
  31. {htmlgraph-0.20.9.data → htmlgraph-0.22.0.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
  32. {htmlgraph-0.20.9.dist-info → htmlgraph-0.22.0.dist-info}/WHEEL +0 -0
  33. {htmlgraph-0.20.9.dist-info → htmlgraph-0.22.0.dist-info}/entry_points.txt +0 -0
htmlgraph/docs/templates/base_agents.md.j2 ADDED
@@ -0,0 +1,78 @@
+ {# Base template for AGENTS.md - Package provides this #}
+ ---
+ version: "{{ sdk_version }}"
+ platform: "{{ platform }}"
+ generated: "{{ generated_at }}"
+ ---
+
+ {% block header %}
+ # HtmlGraph Agent Documentation
+ {% endblock %}
+
+ {% block introduction %}
+ ## Introduction
+
+ HtmlGraph is a lightweight graph database framework built entirely on web standards (HTML, CSS, JavaScript) for AI agent coordination and human observability.
+
+ **Tagline**: "HTML is All You Need"
+ {% endblock %}
+
+ {% block quick_start %}
+ ## Quick Start
+
+ ```python
+ from htmlgraph import SDK
+
+ # Initialize SDK for your agent
+ sdk = SDK(agent="{{ platform }}")
+
+ # Create a feature
+ feature = sdk.features.create("Add authentication") \
+ .set_priority("high") \
+ .add_steps(["Setup OAuth", "Add JWT", "Test login"]) \
+ .save()
+
+ # Track progress
+ sdk.features.get(feature.id).complete_step(0).save()
+ ```
+ {% endblock %}
+
+ {% block core_concepts %}
+ {% include "_sections/core_concepts.md.j2" %}
+ {% endblock %}
+
+ {% block sdk_reference %}
+ {% include "_sections/sdk_basics.md.j2" %}
+ {% endblock %}
+
+ {% block cli_reference %}
+ {% include "_sections/cli_reference.md.j2" %}
+ {% endblock %}
+
+ {% block custom_workflows %}
+ {# Users can override this block in .htmlgraph/docs/templates/agents.md.j2 #}
+ {% if custom_workflows %}
+ ## Custom Workflows
+
+ {{ custom_workflows }}
+ {% endif %}
+ {% endblock %}
+
+ {% block deployment %}
+ ## Deployment
+
+ For deployment instructions, see the deployment guide:
+
+ ```bash
+ # Build and publish package
+ ./scripts/deploy-all.sh 0.21.0 --no-confirm
+
+ # Documentation changes only
+ ./scripts/deploy-all.sh --docs-only
+ ```
+ {% endblock %}
+
+ {% block footer %}
+ ---
+ *Generated by HtmlGraph v{{ sdk_version }} on {{ generated_at }}*
+ {% endblock %}
htmlgraph/docs/templates/example_user_override.md.j2 ADDED
@@ -0,0 +1,47 @@
+ {# Example user override template - copy to .htmlgraph/docs/templates/agents.md.j2 #}
+ {% extends "base_agents.md.j2" %}
+
+ {# Override the header to customize branding #}
+ {% block header %}
+ # 🤖 {{ platform|title }} Agent - Our Team Documentation
+ {% endblock %}
+
+ {# Add custom team workflows #}
+ {% block custom_workflows %}
+ ## Our Team Conventions
+
+ ### Daily Workflow
+ 1. **Morning Standup** - Review `sdk.summary()` for overnight progress
+ 2. **Pick Task** - Use `sdk.analytics.recommend_next_work()` for priorities
+ 3. **Daily Feature** - Create with template: `feat-{YYYYMMDD}-{description}`
+ 4. **End of Day** - Commit with `./scripts/git-commit-push.sh`
+
+ ### Commit Message Format
+ ```
+ type(scope): description
+
+ Examples:
+ - feat(auth): add OAuth provider integration
+ - fix(api): resolve session timeout bug
+ - docs(readme): update installation steps
+ ```
+
+ ### Branch Strategy
+ - `main` - Production-ready code
+ - `feature/*` - New features
+ - `fix/*` - Bug fixes
+ - `spike/*` - Research and experimentation
+
+ ### Code Review Checklist
+ - [ ] All tests pass
+ - [ ] Documentation updated
+ - [ ] No sensitive data in code
+ - [ ] Ruff and mypy pass
+ - [ ] Feature tracked in HtmlGraph
+ {% endblock %}
+
+ {# You can also override other blocks:
+ - introduction: Project-specific intro
+ - deployment: Team-specific deployment process
+ - footer: Custom footer with team info
+ #}
htmlgraph/docs/version_check.py ADDED
@@ -0,0 +1,161 @@
+ """
+ Version checking and interactive upgrade workflows.
+ """
+
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+
+ from htmlgraph.docs.docs_version import get_current_doc_version, is_compatible
+ from htmlgraph.docs.metadata import DocsMetadata
+ from htmlgraph.docs.migrations import get_migration
+
+ if TYPE_CHECKING:
+ from htmlgraph.docs.migrations import MigrationScript
+
+
+ def check_docs_version(htmlgraph_dir: Path) -> tuple[bool, str | None]:
+ """Check if docs version is compatible.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+
+ Returns:
+ Tuple of (is_compatible, message)
+ - is_compatible: True if compatible
+ - message: Optional warning/error message
+ """
+ metadata = DocsMetadata.load(htmlgraph_dir)
+ current_version = get_current_doc_version()
+
+ if metadata.schema_version == current_version:
+ return True, None
+
+ if is_compatible(metadata.schema_version, current_version):
+ return (
+ True,
+ f"⚠️ Docs version {metadata.schema_version} is supported but outdated (current: {current_version})",
+ )
+
+ return (
+ False,
+ f"❌ Docs version {metadata.schema_version} is incompatible with package (requires: {current_version})",
+ )
+
+
+ def upgrade_docs_interactive(htmlgraph_dir: Path) -> None:
+ """Interactive upgrade workflow with user prompts.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+ """
+ metadata = DocsMetadata.load(htmlgraph_dir)
+ current_version = get_current_doc_version()
+
+ if metadata.schema_version == current_version:
+ print("✅ Docs are up to date")
+ return
+
+ # Get migration script
+ migration = get_migration(metadata.schema_version, current_version)
+ if not migration:
+ print(
+ f"❌ No migration available from v{metadata.schema_version} to v{current_version}"
+ )
+ return
+
+ # Show user their options
+ print(
+ f"""
+ 📋 Documentation Upgrade Available
+ Current: v{metadata.schema_version}
+ Target: v{current_version}
+
+ Options:
+ 1. Auto-migrate (preserves customizations)
+ 2. Side-by-side (test before committing)
+ 3. Manual migration (view diff first)
+ 4. Skip (stay on v{metadata.schema_version})
+ """
+ )
+
+ choice = input("Choose option (1-4): ").strip()
+
+ if choice == "1":
+ _auto_migrate(htmlgraph_dir, migration)
+ elif choice == "2":
+ _side_by_side_migrate(htmlgraph_dir, migration)
+ elif choice == "3":
+ _show_diff_for_manual(htmlgraph_dir, migration)
+ else:
+ print("⏭️ Skipping migration")
+
+
+ def _auto_migrate(htmlgraph_dir: Path, migration: "MigrationScript") -> None: # type: ignore[name-defined]
+ """Automatically migrate with backup.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+ migration: MigrationScript instance
+ """
+ backup_dir = htmlgraph_dir / ".docs-backups"
+ backup_dir.mkdir(exist_ok=True)
+
+ print("🚀 Starting auto-migration...")
+ success = migration.migrate(htmlgraph_dir, backup_dir)
+
+ if success:
+ print("✅ Migration complete!")
+ print(f"📦 Backup saved to {backup_dir}")
+ else:
+ print("❌ Migration failed. Docs unchanged.")
+
+
+ def _side_by_side_migrate(htmlgraph_dir: Path, migration: "MigrationScript") -> None: # type: ignore[name-defined]
+ """Create side-by-side versions for testing.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+ migration: MigrationScript instance
+ """
+ print("📋 Creating side-by-side versions...")
+ print("⚠️ Side-by-side migration not yet implemented")
+ print(" Use option 1 (auto-migrate) or 3 (manual) instead")
+
+
+ def _show_diff_for_manual(htmlgraph_dir: Path, migration: "MigrationScript") -> None: # type: ignore[name-defined]
+ """Show diff preview for manual migration.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+ migration: MigrationScript instance
+ """
+ print("📊 Showing migration preview...")
+ print("⚠️ Diff preview not yet implemented")
+ print(" Use option 1 (auto-migrate) instead")
+
+
+ def check_version_on_init(htmlgraph_dir: Path, auto_upgrade: bool = False) -> bool:
+ """Check version compatibility on SDK initialization.
+
+ Args:
+ htmlgraph_dir: Path to .htmlgraph directory
+ auto_upgrade: If True, automatically upgrade if safe
+
+ Returns:
+ True if compatible or upgraded successfully
+ """
+ compatible, message = check_docs_version(htmlgraph_dir)
+
+ if compatible and message:
+ # Compatible but outdated
+ print(message)
+ if auto_upgrade:
+ upgrade_docs_interactive(htmlgraph_dir)
+ return True
+
+ if not compatible:
+ print(message)
+ print("\nRun `uv run htmlgraph docs upgrade` to migrate.")
+ return False
+
+ return True
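
For orientation, here is a minimal sketch of wiring the new `check_version_on_init` entry point into application startup. Only the function signatures above come from this diff; the `.htmlgraph` location and the exit-on-incompatibility policy are illustrative assumptions.

```python
# Hedged sketch: calling the new docs version check at startup.
# Only the signatures shown in version_check.py above are from the diff;
# the .htmlgraph path and the SystemExit policy are illustrative.
from pathlib import Path

from htmlgraph.docs.version_check import check_version_on_init

htmlgraph_dir = Path(".htmlgraph")  # assumed project-local docs directory

# True means the docs schema is compatible (or was upgraded interactively);
# False means a manual `uv run htmlgraph docs upgrade` is needed.
if not check_version_on_init(htmlgraph_dir, auto_upgrade=False):
    raise SystemExit("Docs schema is incompatible; run `uv run htmlgraph docs upgrade`.")
```
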
htmlgraph/learning.py CHANGED
@@ -150,68 +150,86 @@ class LearningPersistence:
  return health

  def persist_patterns(self, min_count: int = 2) -> list[str]:
- """Detect and persist workflow patterns from sessions.
+ """Detect and persist workflow patterns IN SESSIONS (not as separate files).
+
+ This refactored version stores patterns inline within session HTML files
+ to avoid creating 2,890+ individual pattern files.

  Args:
  min_count: Minimum occurrences to persist a pattern

  Returns:
- List of persisted pattern IDs
+ List of session IDs that had patterns updated
  """
- # Collect tool sequences from all sessions
- # Use session_manager to get full Session objects with activity_log
- sequences: list[tuple[Any, ...]] = []
+ # Collect tool sequences per session (not globally)
+ session_ids_updated: list[str] = []
+
  for session in self.sdk.session_manager.session_converter.load_all():
- if session.activity_log:
- tools = [
- a.tool if not isinstance(a, dict) else a.get("tool", "")
- for a in session.activity_log
- ]
- # Extract 3-tool sequences
- for i in range(len(tools) - 2):
- seq = tools[i : i + 3]
- if all(seq): # No empty tools
- sequences.append(tuple(seq))
-
- # Count sequences
- seq_counts = Counter(sequences)
-
- # Persist patterns with min_count
- pattern_ids: list[str | Any] = []
- for seq, count in seq_counts.items(): # type: ignore[assignment]
- if count >= min_count:
- # Check if pattern already exists
- existing = self.sdk.patterns.find_by_sequence(list(seq))
- if existing:
- # Update count - use properties dict for updates
- pattern = existing[0]
- pattern.properties["detection_count"] = count
- pattern.properties["last_detected"] = datetime.now().isoformat()
- self.sdk.patterns.update(pattern)
- pattern_ids.append(pattern.id)
- else:
- # Create new pattern using builder methods
- pattern_type = self._classify_pattern(list(seq))
- now = datetime.now()
- pattern = (
- self.sdk.patterns.create(f"Pattern: {' -> '.join(seq)}")
- .set_sequence(list(seq))
- .set_pattern_type(pattern_type)
- .set_detection_count(count)
- .set_first_detected(now)
- .set_last_detected(now)
- .save()
+ if not session.activity_log:
+ continue
+
+ # Extract 3-tool sequences from this session
+ tools = [
+ a.tool if not isinstance(a, dict) else a.get("tool", "")
+ for a in session.activity_log
+ ]
+
+ # Count sequences in this session
+ sequences: list[tuple[Any, ...]] = []
+ for i in range(len(tools) - 2):
+ seq = tools[i : i + 3]
+ if all(seq): # No empty tools
+ sequences.append(tuple(seq))
+
+ seq_counts = Counter(sequences)
+
+ # Update session's detected_patterns
+ patterns_updated = False
+ for seq, count in seq_counts.items(): # type: ignore[assignment]
+ if count >= min_count:
+ # Check if pattern already exists in this session
+ existing = next(
+ (
+ p
+ for p in session.detected_patterns
+ if p.get("sequence") == list(seq)
+ ),
+ None,
  )
- pattern_ids.append(pattern.id)
+
+ if existing:
+ # Update existing pattern
+ existing["detection_count"] = count
+ existing["last_detected"] = datetime.now().isoformat()
+ patterns_updated = True
+ else:
+ # Add new pattern to session
+ pattern_type = self._classify_pattern(list(seq))
+ now = datetime.now()
+ session.detected_patterns.append(
+ {
+ "sequence": list(seq),
+ "pattern_type": pattern_type,
+ "detection_count": count,
+ "first_detected": now.isoformat(),
+ "last_detected": now.isoformat(),
+ }
+ )
+ patterns_updated = True
+
+ # Save updated session if patterns were modified
+ if patterns_updated:
+ self.sdk.session_manager.session_converter.save(session)
+ session_ids_updated.append(session.id)

  # Also persist parallel patterns
- parallel_pattern_ids = self.persist_parallel_patterns(min_count=min_count)
- pattern_ids.extend(parallel_pattern_ids)
+ parallel_session_ids = self.persist_parallel_patterns(min_count=min_count)
+ session_ids_updated.extend(parallel_session_ids)

- return pattern_ids
+ return session_ids_updated

  def persist_parallel_patterns(self, min_count: int = 2) -> list[str]:
- """Detect and persist parallel execution patterns from sessions.
+ """Detect and persist parallel execution patterns IN SESSIONS.

  Identifies when multiple tools are invoked in parallel (same parent_activity_id).
  This is especially useful for detecting orchestrator patterns like parallel Task delegation.
@@ -220,12 +238,11 @@ class LearningPersistence:
  min_count: Minimum occurrences to persist a pattern

  Returns:
- List of persisted pattern IDs
+ List of session IDs that had parallel patterns updated
  """
  from collections import defaultdict

- # Collect parallel execution groups from all sessions
- parallel_patterns: list[tuple[str, ...]] = []
+ session_ids_updated: list[str] = []

  for session in self.sdk.session_manager.session_converter.load_all():
  if not session.activity_log:
@@ -242,12 +259,12 @@ class LearningPersistence:
  if parent_id: # Only track activities with a parent
  parent_groups[parent_id].append(activity)

- # Detect parallel patterns (2+ activities with same parent)
+ # Collect parallel patterns for this session
+ parallel_patterns: list[tuple[str, ...]] = []
  for parent_id, activities in parent_groups.items():
  if len(activities) < 2:
  continue

- # Check if activities overlap in time (parallel execution)
  # Sort by timestamp
  sorted_activities = sorted(
  activities,
@@ -268,50 +285,57 @@ class LearningPersistence:
  if all(tools):
  parallel_patterns.append(tools)

- # Count parallel patterns
- pattern_counts = Counter(parallel_patterns)
-
- # Persist patterns with min_count
- pattern_ids: list[str | Any] = []
- for tools, count in pattern_counts.items():
- if count >= min_count:
- # Create a pattern name that indicates parallelism
- tool_names = list(tools)
- pattern_name = f"Parallel[{len(tools)}]: {' || '.join(tools)}"
-
- # Check if pattern already exists
- existing = self.sdk.patterns.find_by_sequence(tool_names)
- if existing:
- # Update existing pattern
- pattern = existing[0]
- pattern.properties = pattern.properties or {}
- pattern.properties["detection_count"] = count
- pattern.properties["last_detected"] = datetime.now().isoformat()
- pattern.properties["parallel_count"] = len(tools)
- pattern.properties["is_parallel"] = True
- self.sdk.patterns.update(pattern)
- pattern_ids.append(pattern.id)
- else:
- # Create new parallel pattern
- pattern_type = self._classify_pattern(tool_names, is_parallel=True)
- now = datetime.now()
- pattern = (
- self.sdk.patterns.create(pattern_name)
- .set_sequence(tool_names)
- .set_pattern_type(pattern_type)
- .set_detection_count(count)
- .set_first_detected(now)
- .set_last_detected(now)
- .save()
+ # Count parallel patterns in this session
+ pattern_counts = Counter(parallel_patterns)
+
+ # Update session's detected_patterns with parallel patterns
+ patterns_updated = False
+ for tools, count in pattern_counts.items():
+ if count >= min_count:
+ tool_names = list(tools)
+
+ # Check if pattern already exists in this session
+ # Parallel patterns have special naming: "Parallel[N]: tool1 || tool2"
+ existing = next(
+ (
+ p
+ for p in session.detected_patterns
+ if p.get("sequence") == tool_names
+ and p.get("is_parallel", False)
+ ),
+ None,
  )
- # Mark as parallel in properties
- pattern.properties = pattern.properties or {}
- pattern.properties["parallel_count"] = len(tools)
- pattern.properties["is_parallel"] = True
- self.sdk.patterns.update(pattern)
- pattern_ids.append(pattern.id)
-
- return pattern_ids
+
+ if existing:
+ # Update existing parallel pattern
+ existing["detection_count"] = count
+ existing["last_detected"] = datetime.now().isoformat()
+ patterns_updated = True
+ else:
+ # Add new parallel pattern to session
+ pattern_type = self._classify_pattern(
+ tool_names, is_parallel=True
+ )
+ now = datetime.now()
+ session.detected_patterns.append(
+ {
+ "sequence": tool_names,
+ "pattern_type": pattern_type,
+ "detection_count": count,
+ "first_detected": now.isoformat(),
+ "last_detected": now.isoformat(),
+ "is_parallel": True,
+ "parallel_count": len(tools),
+ }
+ )
+ patterns_updated = True
+
+ # Save updated session if patterns were modified
+ if patterns_updated:
+ self.sdk.session_manager.session_converter.save(session)
+ session_ids_updated.append(session.id)
+
+ return session_ids_updated

  def _classify_pattern(self, sequence: list[str], is_parallel: bool = False) -> str:
  """Classify a pattern as optimal, anti-pattern, or neutral.
htmlgraph/models.py CHANGED
@@ -937,6 +937,21 @@ class Session(BaseModel):
  transcript_synced_at: datetime | None = None # Last sync timestamp
  transcript_git_branch: str | None = None # Git branch from transcript

+ # Pattern detection (inline storage to avoid file bloat)
+ detected_patterns: list[dict[str, Any]] = Field(default_factory=list)
+ """
+ Patterns detected during this session.
+
+ Format:
+ {
+ "sequence": ["Bash", "Read", "Edit"],
+ "pattern_type": "neutral", # or "optimal", "anti_pattern"
+ "detection_count": 3,
+ "first_detected": "2026-01-02T10:00:00",
+ "last_detected": "2026-01-02T10:30:00"
+ }
+ """
+
  def add_activity(self, entry: ActivityEntry) -> None:
  """Add an activity entry to the log."""
  self.activity_log.append(entry)
@@ -1378,6 +1393,43 @@ class Session(BaseModel):
  </dl>
  </section>"""

+ # Build detected patterns section
+ patterns_html = ""
+ if self.detected_patterns:
+ patterns_html = f"""
+ <section data-detected-patterns>
+ <h3>Detected Patterns ({len(self.detected_patterns)})</h3>
+ <table class="patterns-table">
+ <thead>
+ <tr>
+ <th>Sequence</th>
+ <th>Type</th>
+ <th>Count</th>
+ <th>First/Last Detected</th>
+ </tr>
+ </thead>
+ <tbody>"""
+
+ for pattern in self.detected_patterns:
+ seq_str = " → ".join(pattern.get("sequence", []))
+ pattern_type = pattern.get("pattern_type", "neutral")
+ count = pattern.get("detection_count", 0)
+ first = pattern.get("first_detected", "")
+ last = pattern.get("last_detected", "")
+
+ patterns_html += f"""
+ <tr data-pattern-type="{pattern_type}">
+ <td class="sequence">{seq_str}</td>
+ <td><span class="badge pattern-{pattern_type}">{pattern_type}</span></td>
+ <td>{count}</td>
+ <td>{first} / {last}</td>
+ </tr>"""
+
+ patterns_html += """
+ </tbody>
+ </table>
+ </section>"""
+
  title = self.title or f"Session {self.id}"

  return f'''<!DOCTYPE html>
@@ -1406,7 +1458,7 @@ class Session(BaseModel):
  <span class="badge">{self.event_count} events</span>
  </div>
  </header>
- {edges_html}{handoff_html}{context_html}{activity_html}
+ {edges_html}{handoff_html}{context_html}{patterns_html}{activity_html}
  </article>
  </body>
  </html>
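
The new `Session.detected_patterns` field holds plain dicts in the format documented in the first hunk and renders them as a table in the session HTML. A small sketch of appending a record in that shape; the `record_pattern` helper is hypothetical:

```python
# Hedged sketch: appending an inline pattern record to a Session. The
# record_pattern helper is hypothetical; the field name and dict keys
# come from the detected_patterns docstring in the diff above.
from datetime import datetime

from htmlgraph.models import Session


def record_pattern(session: Session, sequence: list[str], count: int) -> None:
    now = datetime.now().isoformat()
    session.detected_patterns.append(
        {
            "sequence": sequence,       # e.g. ["Bash", "Read", "Edit"]
            "pattern_type": "neutral",  # or "optimal" / "anti_pattern"
            "detection_count": count,
            "first_detected": now,
            "last_detected": now,
        }
    )
```
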
htmlgraph/sdk.py CHANGED
@@ -44,7 +44,7 @@ from typing import Any

  from htmlgraph.agent_detection import detect_agent_name
  from htmlgraph.agents import AgentInterface
- from htmlgraph.analytics import Analytics, DependencyAnalytics
+ from htmlgraph.analytics import Analytics, CrossSessionAnalytics, DependencyAnalytics
  from htmlgraph.collections import (
  BaseCollection,
  BugCollection,
@@ -242,6 +242,9 @@ class SDK:
  # Dependency analytics interface (Advanced graph analytics)
  self.dep_analytics = DependencyAnalytics(self._graph)

+ # Cross-session analytics interface (Git commit-based analytics)
+ self.cross_session_analytics = CrossSessionAnalytics(self)
+
  # Context analytics interface (Context usage tracking)
  self.context = ContextAnalytics(self)
{htmlgraph-0.20.9.dist-info → htmlgraph-0.22.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: htmlgraph
- Version: 0.20.9
+ Version: 0.22.0
  Summary: HTML is All You Need - Graph database on web standards
  Project-URL: Homepage, https://github.com/Shakes-tzd/htmlgraph
  Project-URL: Documentation, https://github.com/Shakes-tzd/htmlgraph#readme