htmlgraph 0.20.9__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
htmlgraph/learning.py CHANGED
@@ -150,68 +150,86 @@ class LearningPersistence:
          return health
 
      def persist_patterns(self, min_count: int = 2) -> list[str]:
-         """Detect and persist workflow patterns from sessions.
+         """Detect and persist workflow patterns IN SESSIONS (not as separate files).
+
+         This refactored version stores patterns inline within session HTML files
+         to avoid creating 2,890+ individual pattern files.
 
          Args:
              min_count: Minimum occurrences to persist a pattern
 
          Returns:
-             List of persisted pattern IDs
+             List of session IDs that had patterns updated
          """
-         # Collect tool sequences from all sessions
-         # Use session_manager to get full Session objects with activity_log
-         sequences: list[tuple[Any, ...]] = []
+         # Collect tool sequences per session (not globally)
+         session_ids_updated: list[str] = []
+
          for session in self.sdk.session_manager.session_converter.load_all():
-             if session.activity_log:
-                 tools = [
-                     a.tool if not isinstance(a, dict) else a.get("tool", "")
-                     for a in session.activity_log
-                 ]
-                 # Extract 3-tool sequences
-                 for i in range(len(tools) - 2):
-                     seq = tools[i : i + 3]
-                     if all(seq):  # No empty tools
-                         sequences.append(tuple(seq))
-
-         # Count sequences
-         seq_counts = Counter(sequences)
-
-         # Persist patterns with min_count
-         pattern_ids: list[str | Any] = []
-         for seq, count in seq_counts.items():  # type: ignore[assignment]
-             if count >= min_count:
-                 # Check if pattern already exists
-                 existing = self.sdk.patterns.find_by_sequence(list(seq))
-                 if existing:
-                     # Update count - use properties dict for updates
-                     pattern = existing[0]
-                     pattern.properties["detection_count"] = count
-                     pattern.properties["last_detected"] = datetime.now().isoformat()
-                     self.sdk.patterns.update(pattern)
-                     pattern_ids.append(pattern.id)
-                 else:
-                     # Create new pattern using builder methods
-                     pattern_type = self._classify_pattern(list(seq))
-                     now = datetime.now()
-                     pattern = (
-                         self.sdk.patterns.create(f"Pattern: {' -> '.join(seq)}")
-                         .set_sequence(list(seq))
-                         .set_pattern_type(pattern_type)
-                         .set_detection_count(count)
-                         .set_first_detected(now)
-                         .set_last_detected(now)
-                         .save()
+             if not session.activity_log:
+                 continue
+
+             # Extract 3-tool sequences from this session
+             tools = [
+                 a.tool if not isinstance(a, dict) else a.get("tool", "")
+                 for a in session.activity_log
+             ]
+
+             # Count sequences in this session
+             sequences: list[tuple[Any, ...]] = []
+             for i in range(len(tools) - 2):
+                 seq = tools[i : i + 3]
+                 if all(seq):  # No empty tools
+                     sequences.append(tuple(seq))
+
+             seq_counts = Counter(sequences)
+
+             # Update session's detected_patterns
+             patterns_updated = False
+             for seq, count in seq_counts.items():  # type: ignore[assignment]
+                 if count >= min_count:
+                     # Check if pattern already exists in this session
+                     existing = next(
+                         (
+                             p
+                             for p in session.detected_patterns
+                             if p.get("sequence") == list(seq)
+                         ),
+                         None,
                      )
-                     pattern_ids.append(pattern.id)
+
+                     if existing:
+                         # Update existing pattern
+                         existing["detection_count"] = count
+                         existing["last_detected"] = datetime.now().isoformat()
+                         patterns_updated = True
+                     else:
+                         # Add new pattern to session
+                         pattern_type = self._classify_pattern(list(seq))
+                         now = datetime.now()
+                         session.detected_patterns.append(
+                             {
+                                 "sequence": list(seq),
+                                 "pattern_type": pattern_type,
+                                 "detection_count": count,
+                                 "first_detected": now.isoformat(),
+                                 "last_detected": now.isoformat(),
+                             }
+                         )
+                         patterns_updated = True
+
+             # Save updated session if patterns were modified
+             if patterns_updated:
+                 self.sdk.session_manager.session_converter.save(session)
+                 session_ids_updated.append(session.id)
 
          # Also persist parallel patterns
-         parallel_pattern_ids = self.persist_parallel_patterns(min_count=min_count)
-         pattern_ids.extend(parallel_pattern_ids)
+         parallel_session_ids = self.persist_parallel_patterns(min_count=min_count)
+         session_ids_updated.extend(parallel_session_ids)
 
-         return pattern_ids
+         return session_ids_updated
 
      def persist_parallel_patterns(self, min_count: int = 2) -> list[str]:
-         """Detect and persist parallel execution patterns from sessions.
+         """Detect and persist parallel execution patterns IN SESSIONS.
 
          Identifies when multiple tools are invoked in parallel (same parent_activity_id).
          This is especially useful for detecting orchestrator patterns like parallel Task delegation.
@@ -220,12 +238,11 @@ class LearningPersistence:
              min_count: Minimum occurrences to persist a pattern
 
          Returns:
-             List of persisted pattern IDs
+             List of session IDs that had parallel patterns updated
          """
          from collections import defaultdict
 
-         # Collect parallel execution groups from all sessions
-         parallel_patterns: list[tuple[str, ...]] = []
+         session_ids_updated: list[str] = []
 
          for session in self.sdk.session_manager.session_converter.load_all():
              if not session.activity_log:
@@ -242,12 +259,12 @@ class LearningPersistence:
                  if parent_id:  # Only track activities with a parent
                      parent_groups[parent_id].append(activity)
 
-             # Detect parallel patterns (2+ activities with same parent)
+             # Collect parallel patterns for this session
+             parallel_patterns: list[tuple[str, ...]] = []
              for parent_id, activities in parent_groups.items():
                  if len(activities) < 2:
                      continue
 
-                 # Check if activities overlap in time (parallel execution)
                  # Sort by timestamp
                  sorted_activities = sorted(
                      activities,
@@ -268,50 +285,57 @@ class LearningPersistence:
                  if all(tools):
                      parallel_patterns.append(tools)
 
-         # Count parallel patterns
-         pattern_counts = Counter(parallel_patterns)
-
-         # Persist patterns with min_count
-         pattern_ids: list[str | Any] = []
-         for tools, count in pattern_counts.items():
-             if count >= min_count:
-                 # Create a pattern name that indicates parallelism
-                 tool_names = list(tools)
-                 pattern_name = f"Parallel[{len(tools)}]: {' || '.join(tools)}"
-
-                 # Check if pattern already exists
-                 existing = self.sdk.patterns.find_by_sequence(tool_names)
-                 if existing:
-                     # Update existing pattern
-                     pattern = existing[0]
-                     pattern.properties = pattern.properties or {}
-                     pattern.properties["detection_count"] = count
-                     pattern.properties["last_detected"] = datetime.now().isoformat()
-                     pattern.properties["parallel_count"] = len(tools)
-                     pattern.properties["is_parallel"] = True
-                     self.sdk.patterns.update(pattern)
-                     pattern_ids.append(pattern.id)
-                 else:
-                     # Create new parallel pattern
-                     pattern_type = self._classify_pattern(tool_names, is_parallel=True)
-                     now = datetime.now()
-                     pattern = (
-                         self.sdk.patterns.create(pattern_name)
-                         .set_sequence(tool_names)
-                         .set_pattern_type(pattern_type)
-                         .set_detection_count(count)
-                         .set_first_detected(now)
-                         .set_last_detected(now)
-                         .save()
+             # Count parallel patterns in this session
+             pattern_counts = Counter(parallel_patterns)
+
+             # Update session's detected_patterns with parallel patterns
+             patterns_updated = False
+             for tools, count in pattern_counts.items():
+                 if count >= min_count:
+                     tool_names = list(tools)
+
+                     # Check if pattern already exists in this session
+                     # Parallel patterns have special naming: "Parallel[N]: tool1 || tool2"
+                     existing = next(
+                         (
+                             p
+                             for p in session.detected_patterns
+                             if p.get("sequence") == tool_names
+                             and p.get("is_parallel", False)
+                         ),
+                         None,
                      )
-                     # Mark as parallel in properties
-                     pattern.properties = pattern.properties or {}
-                     pattern.properties["parallel_count"] = len(tools)
-                     pattern.properties["is_parallel"] = True
-                     self.sdk.patterns.update(pattern)
-                     pattern_ids.append(pattern.id)
-
-         return pattern_ids
+
+                     if existing:
+                         # Update existing parallel pattern
+                         existing["detection_count"] = count
+                         existing["last_detected"] = datetime.now().isoformat()
+                         patterns_updated = True
+                     else:
+                         # Add new parallel pattern to session
+                         pattern_type = self._classify_pattern(
+                             tool_names, is_parallel=True
+                         )
+                         now = datetime.now()
+                         session.detected_patterns.append(
+                             {
+                                 "sequence": tool_names,
+                                 "pattern_type": pattern_type,
+                                 "detection_count": count,
+                                 "first_detected": now.isoformat(),
+                                 "last_detected": now.isoformat(),
+                                 "is_parallel": True,
+                                 "parallel_count": len(tools),
+                             }
+                         )
+                         patterns_updated = True
+
+             # Save updated session if patterns were modified
+             if patterns_updated:
+                 self.sdk.session_manager.session_converter.save(session)
+                 session_ids_updated.append(session.id)
+
+         return session_ids_updated
 
      def _classify_pattern(self, sequence: list[str], is_parallel: bool = False) -> str:
          """Classify a pattern as optimal, anti-pattern, or neutral.
htmlgraph/models.py CHANGED
@@ -937,6 +937,21 @@ class Session(BaseModel):
      transcript_synced_at: datetime | None = None  # Last sync timestamp
      transcript_git_branch: str | None = None  # Git branch from transcript
 
+     # Pattern detection (inline storage to avoid file bloat)
+     detected_patterns: list[dict[str, Any]] = Field(default_factory=list)
+     """
+     Patterns detected during this session.
+
+     Format:
+         {
+             "sequence": ["Bash", "Read", "Edit"],
+             "pattern_type": "neutral",  # or "optimal", "anti_pattern"
+             "detection_count": 3,
+             "first_detected": "2026-01-02T10:00:00",
+             "last_detected": "2026-01-02T10:30:00"
+         }
+     """
+
      def add_activity(self, entry: ActivityEntry) -> None:
          """Add an activity entry to the log."""
          self.activity_log.append(entry)
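
A minimal sketch of one entry in the documented format; only the field shape comes from the docstring above, and how the Session instance is obtained is assumed.

    from datetime import datetime

    # One entry in the format documented on Session.detected_patterns above.
    pattern_entry = {
        "sequence": ["Bash", "Read", "Edit"],
        "pattern_type": "neutral",
        "detection_count": 3,
        "first_detected": datetime.now().isoformat(),
        "last_detected": datetime.now().isoformat(),
    }
    # Appended to a loaded Session (obtaining the session instance is assumed):
    # session.detected_patterns.append(pattern_entry)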
@@ -1378,6 +1393,43 @@ class Session(BaseModel):
          </dl>
      </section>"""
 
+         # Build detected patterns section
+         patterns_html = ""
+         if self.detected_patterns:
+             patterns_html = f"""
+     <section data-detected-patterns>
+         <h3>Detected Patterns ({len(self.detected_patterns)})</h3>
+         <table class="patterns-table">
+             <thead>
+                 <tr>
+                     <th>Sequence</th>
+                     <th>Type</th>
+                     <th>Count</th>
+                     <th>First/Last Detected</th>
+                 </tr>
+             </thead>
+             <tbody>"""
+
+             for pattern in self.detected_patterns:
+                 seq_str = " → ".join(pattern.get("sequence", []))
+                 pattern_type = pattern.get("pattern_type", "neutral")
+                 count = pattern.get("detection_count", 0)
+                 first = pattern.get("first_detected", "")
+                 last = pattern.get("last_detected", "")
+
+                 patterns_html += f"""
+                 <tr data-pattern-type="{pattern_type}">
+                     <td class="sequence">{seq_str}</td>
+                     <td><span class="badge pattern-{pattern_type}">{pattern_type}</span></td>
+                     <td>{count}</td>
+                     <td>{first} / {last}</td>
+                 </tr>"""
+
+             patterns_html += """
+             </tbody>
+         </table>
+     </section>"""
+
          title = self.title or f"Session {self.id}"
 
          return f'''<!DOCTYPE html>
@@ -1406,7 +1458,7 @@ class Session(BaseModel):
              <span class="badge">{self.event_count} events</span>
          </div>
      </header>
-     {edges_html}{handoff_html}{context_html}{activity_html}
+     {edges_html}{handoff_html}{context_html}{patterns_html}{activity_html}
      </article>
  </body>
  </html>
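
To illustrate what the added rendering loop emits for one pattern, a standalone sketch that mirrors the per-row f-string above (columns trimmed to keep it short; the full table also shows type and timestamps):

    pattern = {"sequence": ["Bash", "Read", "Edit"], "pattern_type": "neutral", "detection_count": 3}

    seq_str = " → ".join(pattern.get("sequence", []))
    pattern_type = pattern.get("pattern_type", "neutral")
    row = (
        f'<tr data-pattern-type="{pattern_type}">'
        f'<td class="sequence">{seq_str}</td>'
        f'<td>{pattern.get("detection_count", 0)}</td>'
        f"</tr>"
    )
    # row == '<tr data-pattern-type="neutral"><td class="sequence">Bash → Read → Edit</td><td>3</td></tr>'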
htmlgraph/sdk.py CHANGED
@@ -44,7 +44,7 @@ from typing import Any
 
  from htmlgraph.agent_detection import detect_agent_name
  from htmlgraph.agents import AgentInterface
- from htmlgraph.analytics import Analytics, DependencyAnalytics
+ from htmlgraph.analytics import Analytics, CrossSessionAnalytics, DependencyAnalytics
  from htmlgraph.collections import (
      BaseCollection,
      BugCollection,
@@ -242,6 +242,9 @@ class SDK:
          # Dependency analytics interface (Advanced graph analytics)
          self.dep_analytics = DependencyAnalytics(self._graph)
 
+         # Cross-session analytics interface (Git commit-based analytics)
+         self.cross_session_analytics = CrossSessionAnalytics(self)
+
          # Context analytics interface (Context usage tracking)
          self.context = ContextAnalytics(self)
 
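
Once an SDK instance exists, the new analytics surface is simply an attribute set in SDK.__init__; a minimal sketch (SDK construction arguments are assumed, and CrossSessionAnalytics methods are not shown in this diff):

    from htmlgraph.sdk import SDK

    sdk = SDK()  # assumed default construction
    cross = sdk.cross_session_analytics  # new in 0.21.0; a CrossSessionAnalytics instance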
htmlgraph-0.20.9.dist-info/METADATA → htmlgraph-0.21.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: htmlgraph
- Version: 0.20.9
+ Version: 0.21.0
  Summary: HTML is All You Need - Graph database on web standards
  Project-URL: Homepage, https://github.com/Shakes-tzd/htmlgraph
  Project-URL: Documentation, https://github.com/Shakes-tzd/htmlgraph#readme
htmlgraph-0.20.9.dist-info/RECORD → htmlgraph-0.21.0.dist-info/RECORD CHANGED
@@ -1,12 +1,12 @@
- htmlgraph/__init__.py,sha256=NE0E3gKujZpGFq5RtICsv15BVCLwgd15_ydZab_pB9c,4979
+ htmlgraph/__init__.py,sha256=AbgvM2vQ2nTI1DHlLsxmhhXZgrxmLxgwr5stbc25ZSc,4979
  htmlgraph/agent_detection.py,sha256=PAYo7rU3N_y1cGRd7Dwjh5Wgu-QZ7ENblX_yOzU-gJ0,2749
  htmlgraph/agent_registry.py,sha256=Usa_35by7p5gtpvHO7K3AcGimnorw-FzgPVa3cWTQ58,9448
  htmlgraph/agents.py,sha256=Yvu6x1nOfrW2WhRTAHiCuSpvqoVJXx1Mkzd59kwEczw,33466
  htmlgraph/analytics_index.py,sha256=ba6Y4H_NNOCxI_Z4U7wSgBFFairf4IJT74WcM1PoZuI,30594
  htmlgraph/attribute_index.py,sha256=cBZUV4YfGnhh6lF59aYPCdNrRr1hK__BzSKCueSDUhQ,6593
- htmlgraph/cli.py,sha256=wYoCjciu1cHqwZe84u2TgO7lS_YyTs4yX5ipLLUqUfo,173218
+ htmlgraph/cli.py,sha256=6CwzcFZ6tOlYCS1521gvFkwVw4qb9XSLvq-eqb2Mz8I,182068
  htmlgraph/context_analytics.py,sha256=CaLu0o2uSr6rlBM5YeaFZe7grgsy7_Hx10qdXuNcdao,11344
- htmlgraph/converter.py,sha256=Yeg31k2tiVlQLVndGIBxQT21YvepDIDIuswZRicYR10,20630
+ htmlgraph/converter.py,sha256=OfcydZcJqvr2jpMxvAD4wcq8o4NXC7w4X4QzdDiYq8k,22277
  htmlgraph/dashboard.html,sha256=rkZYjSnPbUuAm35QMpCNWemenYqQTdkkumCX2hhe8Dc,173537
  htmlgraph/dependency_models.py,sha256=eKpBz9y_pTE5E8baESqHyGUDj5-uXokVd2Bx3ZogAyM,4313
  htmlgraph/deploy.py,sha256=kM_IMa3PmKpQf4YVH57aL9uV5IfpVJgaj-IFsgAKIbY,17771
@@ -21,9 +21,9 @@ htmlgraph/git_events.py,sha256=62lmhGc7W1KGXAcW_Efpd_v2nIuUl0a4Ggpf73RyRmY,20271
  htmlgraph/graph.py,sha256=XhPuUYwhrCFVBEijjOdcHjjMEUlduba39CyzZ9dJ6X0,68187
  htmlgraph/ids.py,sha256=ibEC8xW1ZHbAW6ImOKP2wLARgW7nzkxu8voce_hkljk,8389
  htmlgraph/index.d.ts,sha256=7dvExfA16g1z5Kut8xyHnSUfZ6wiUUwWNy6R7WKiwas,6922
- htmlgraph/learning.py,sha256=Esgxovf7dVtubqswJ0T_sdLzcVN49yF8gCJ1AuHgHSg,27865
+ htmlgraph/learning.py,sha256=6SsRdz-xJGFPjp7YagpUDTZqqjNKp2wWihcnhwkHys0,28566
  htmlgraph/mcp_server.py,sha256=AeJeGJEtX5Dqu5rfhKfT5kwF2Oe8V8xCaP8BgMEh86s,24033
- htmlgraph/models.py,sha256=yz5GrSRvQCC2Qy2ozOOfNm5Tw6mXaXZaACkajSjJnqg,79911
+ htmlgraph/models.py,sha256=tgFFFwOuEe7Yal5I1JTZHJnF625oaHMATEGIvJgrFnk,81818
  htmlgraph/orchestration.py,sha256=7_oQ4AlHOv14hs6RvLsatJzF-F5gkIbv1EOrmeGPhiw,9699
  htmlgraph/orchestrator.py,sha256=6mj70vroWjmNmdvQ7jqqRSA9O1rFUNMUYDWPzqkizLk,19697
  htmlgraph/orchestrator_mode.py,sha256=F6LNZARqieQXUri3CRSq_lsqFbnVeGXJQPno1ZP47O4,9187
@@ -33,7 +33,7 @@ htmlgraph/parser.py,sha256=w5JIYvS8XmUGGsp-YC2ZWAJANS6hvQtvBxOYy9mL-Rs,13934
  htmlgraph/planning.py,sha256=iqPF9mCVQwOfJ4vuqcF2Y3-yhx9koJZw0cID7CknIug,35903
  htmlgraph/query_builder.py,sha256=aNtJ05GpGl9yUSSrX0D6pX_AgqlrrH-CulI_oP11PUk,18092
  htmlgraph/routing.py,sha256=QYDY6bzYPmv6kocAXCqguB1cazN0i_xTo9EVCO3fO2Y,8803
- htmlgraph/sdk.py,sha256=048GzLYdnrw6eUvF2B_A9mRFrrP2od_uiTbD0JrUkFo,82401
+ htmlgraph/sdk.py,sha256=mcJh1cZO9xB2iPJwFQJRnMb5ylr_qmd7c_YgLvgUWRo,82565
  htmlgraph/server.py,sha256=ti_ROzVxc3b4TGylHZ5uzfgax3OHyBoyeJxAaeMfdAo,49297
  htmlgraph/session_manager.py,sha256=8_H29kN6Btii1RfzNpifjjUVTMU0cEeTElFsDC6icLM,89430
  htmlgraph/session_warning.py,sha256=leAYNp8pOkPFosIvNkY4Y3vjs1j76F3pGktUqQX9tI0,7877
@@ -48,10 +48,16 @@ htmlgraph/transcript_analytics.py,sha256=O-T7SfM3gJIbGFNPlF3gmKjcy--NzPiJt03lvTC
  htmlgraph/types.py,sha256=EBxCbke3PhORsmIhHrD-f1XG6YPRX-1D5R1OXCp6xt8,8945
  htmlgraph/watch.py,sha256=xsiZwDVaXZ6vXI_oOZMqMF78aeAgxuhKpxiz98Hryqg,3791
  htmlgraph/work_type_utils.py,sha256=exA3FnuSmVMMMSBhPYDW-Bq5jGaTDjX7jkckCvnVZ_s,3408
- htmlgraph/analytics/__init__.py,sha256=_IaNrc3mY3eNgUv5UYU67sKy2enuBHXL0_vA9njAgH8,287
+ htmlgraph/analytics/__init__.py,sha256=G_DvCaiAeYVfMWFri1GOfLNF7PtAhitKa2V9TXHtsds,409
  htmlgraph/analytics/cli.py,sha256=ujHhMnI13eDJQKBywPNFfUkq_p2vNEG16P_a6vAx0U0,13017
+ htmlgraph/analytics/cross_session.py,sha256=KjDohOPigkHrwI_bnDr3v5MIIgx_bSEIIxXK3UcQoaM,20472
  htmlgraph/analytics/dependency.py,sha256=7Qc5xe0b1QE_P6-l4Z0WtstEmEXc4ZGNbyIEBMoABys,25757
  htmlgraph/analytics/work_type.py,sha256=nMuUmC0rV4gvu2eovCBuR-JEnsCzk6nWnB4_zIfbJ9s,17594
+ htmlgraph/archive/__init__.py,sha256=F5VpZNiKar5J1e6caWhB7P7QQ4xvsyhDHtH65Q3224c,661
+ htmlgraph/archive/bloom.py,sha256=8OL-apF3BGW1V1P9vgi6vk_IHlJe3JOxigMRyuLMjys,7058
+ htmlgraph/archive/fts.py,sha256=HZSNb0GYwfm_KA6BlR72LcaAfXWsUPAi0f4BC7IJcRo,9193
+ htmlgraph/archive/manager.py,sha256=FPYVvgi5xbFIJtveV25KxHmczozfKDZiq0D76k_wHjo,18256
+ htmlgraph/archive/search.py,sha256=stfT296q0fqHUtW9pFQ8yJ034jq3dobEt1_qZQ21A4U,7183
  htmlgraph/builders/__init__.py,sha256=a65xHw2ARqK8oMVS1YNsIS4FAO17nBe2TFcceUqhyc0,949
  htmlgraph/builders/base.py,sha256=iLAE7oTySX8psnFbX1dauDyhrCNTCXa-D_bpM7Ymh_8,7255
  htmlgraph/builders/bug.py,sha256=RzLkMPoRvZQkBK3odZ2MYXdUHc-HgTLE4v0lOn6vyJU,3380
@@ -107,12 +113,12 @@ htmlgraph/services/claiming.py,sha256=HcrltEJKN72mxuD7fGuXWeh1U0vwhjMvhZcFc02Eiy
  htmlgraph/templates/AGENTS.md.template,sha256=f96h7V6ygwj-v-fanVI48eYMxR6t_se4bet1H4ZsDpI,7642
  htmlgraph/templates/CLAUDE.md.template,sha256=h1kG2hTX2XYig2KszsHBfzrwa_4Cfcq2Pj4SwqzeDlM,1984
  htmlgraph/templates/GEMINI.md.template,sha256=gAGzE53Avki87BM_otqy5HdcYCoLsHgqaKjVzNzPMX8,1622
- htmlgraph-0.20.9.data/data/htmlgraph/dashboard.html,sha256=rkZYjSnPbUuAm35QMpCNWemenYqQTdkkumCX2hhe8Dc,173537
- htmlgraph-0.20.9.data/data/htmlgraph/styles.css,sha256=oDUSC8jG-V-hKojOBO9J88hxAeY2wJrBYTq0uCwX_Y4,7135
- htmlgraph-0.20.9.data/data/htmlgraph/templates/AGENTS.md.template,sha256=f96h7V6ygwj-v-fanVI48eYMxR6t_se4bet1H4ZsDpI,7642
- htmlgraph-0.20.9.data/data/htmlgraph/templates/CLAUDE.md.template,sha256=h1kG2hTX2XYig2KszsHBfzrwa_4Cfcq2Pj4SwqzeDlM,1984
- htmlgraph-0.20.9.data/data/htmlgraph/templates/GEMINI.md.template,sha256=gAGzE53Avki87BM_otqy5HdcYCoLsHgqaKjVzNzPMX8,1622
- htmlgraph-0.20.9.dist-info/METADATA,sha256=RxbHkLwWMN4ZBOO1Aw7P_gC1goCeH0rK0wGAIPrYbLc,7645
- htmlgraph-0.20.9.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- htmlgraph-0.20.9.dist-info/entry_points.txt,sha256=EaUbjA_bbDwEO_XDLEGMeK8aQP-ZnHiUTkLshyKDyB8,98
- htmlgraph-0.20.9.dist-info/RECORD,,
+ htmlgraph-0.21.0.data/data/htmlgraph/dashboard.html,sha256=rkZYjSnPbUuAm35QMpCNWemenYqQTdkkumCX2hhe8Dc,173537
+ htmlgraph-0.21.0.data/data/htmlgraph/styles.css,sha256=oDUSC8jG-V-hKojOBO9J88hxAeY2wJrBYTq0uCwX_Y4,7135
+ htmlgraph-0.21.0.data/data/htmlgraph/templates/AGENTS.md.template,sha256=f96h7V6ygwj-v-fanVI48eYMxR6t_se4bet1H4ZsDpI,7642
+ htmlgraph-0.21.0.data/data/htmlgraph/templates/CLAUDE.md.template,sha256=h1kG2hTX2XYig2KszsHBfzrwa_4Cfcq2Pj4SwqzeDlM,1984
+ htmlgraph-0.21.0.data/data/htmlgraph/templates/GEMINI.md.template,sha256=gAGzE53Avki87BM_otqy5HdcYCoLsHgqaKjVzNzPMX8,1622
+ htmlgraph-0.21.0.dist-info/METADATA,sha256=imvjH4WNDmGZNWZ9atUaBoHlxiOfjlAxGrzE0XfHoY8,7645
+ htmlgraph-0.21.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ htmlgraph-0.21.0.dist-info/entry_points.txt,sha256=EaUbjA_bbDwEO_XDLEGMeK8aQP-ZnHiUTkLshyKDyB8,98
+ htmlgraph-0.21.0.dist-info/RECORD,,