htmlgraph 0.24.2__py3-none-any.whl → 0.26.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112) hide show
  1. htmlgraph/__init__.py +20 -1
  2. htmlgraph/agent_detection.py +26 -10
  3. htmlgraph/analytics/cross_session.py +4 -3
  4. htmlgraph/analytics/work_type.py +52 -16
  5. htmlgraph/analytics_index.py +51 -19
  6. htmlgraph/api/__init__.py +3 -0
  7. htmlgraph/api/main.py +2263 -0
  8. htmlgraph/api/static/htmx.min.js +1 -0
  9. htmlgraph/api/static/style-redesign.css +1344 -0
  10. htmlgraph/api/static/style.css +1079 -0
  11. htmlgraph/api/templates/dashboard-redesign.html +812 -0
  12. htmlgraph/api/templates/dashboard.html +794 -0
  13. htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
  14. htmlgraph/api/templates/partials/activity-feed.html +1020 -0
  15. htmlgraph/api/templates/partials/agents-redesign.html +317 -0
  16. htmlgraph/api/templates/partials/agents.html +317 -0
  17. htmlgraph/api/templates/partials/event-traces.html +373 -0
  18. htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
  19. htmlgraph/api/templates/partials/features.html +509 -0
  20. htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
  21. htmlgraph/api/templates/partials/metrics.html +346 -0
  22. htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
  23. htmlgraph/api/templates/partials/orchestration.html +163 -0
  24. htmlgraph/api/templates/partials/spawners.html +375 -0
  25. htmlgraph/atomic_ops.py +560 -0
  26. htmlgraph/builders/base.py +55 -1
  27. htmlgraph/builders/bug.py +17 -2
  28. htmlgraph/builders/chore.py +17 -2
  29. htmlgraph/builders/epic.py +17 -2
  30. htmlgraph/builders/feature.py +25 -2
  31. htmlgraph/builders/phase.py +17 -2
  32. htmlgraph/builders/spike.py +27 -2
  33. htmlgraph/builders/track.py +14 -0
  34. htmlgraph/cigs/__init__.py +4 -0
  35. htmlgraph/cigs/reporter.py +818 -0
  36. htmlgraph/cli.py +1427 -401
  37. htmlgraph/cli_commands/__init__.py +1 -0
  38. htmlgraph/cli_commands/feature.py +195 -0
  39. htmlgraph/cli_framework.py +115 -0
  40. htmlgraph/collections/__init__.py +2 -0
  41. htmlgraph/collections/base.py +21 -0
  42. htmlgraph/collections/session.py +189 -0
  43. htmlgraph/collections/spike.py +7 -1
  44. htmlgraph/collections/task_delegation.py +236 -0
  45. htmlgraph/collections/traces.py +482 -0
  46. htmlgraph/config.py +113 -0
  47. htmlgraph/converter.py +41 -0
  48. htmlgraph/cost_analysis/__init__.py +5 -0
  49. htmlgraph/cost_analysis/analyzer.py +438 -0
  50. htmlgraph/dashboard.html +3356 -492
  51. htmlgraph-0.24.2.data/data/htmlgraph/dashboard.html → htmlgraph/dashboard.html.backup +2246 -248
  52. htmlgraph/dashboard.html.bak +7181 -0
  53. htmlgraph/dashboard.html.bak2 +7231 -0
  54. htmlgraph/dashboard.html.bak3 +7232 -0
  55. htmlgraph/db/__init__.py +38 -0
  56. htmlgraph/db/queries.py +790 -0
  57. htmlgraph/db/schema.py +1584 -0
  58. htmlgraph/deploy.py +26 -27
  59. htmlgraph/docs/API_REFERENCE.md +841 -0
  60. htmlgraph/docs/HTTP_API.md +750 -0
  61. htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
  62. htmlgraph/docs/ORCHESTRATION_PATTERNS.md +710 -0
  63. htmlgraph/docs/README.md +533 -0
  64. htmlgraph/docs/version_check.py +3 -1
  65. htmlgraph/error_handler.py +544 -0
  66. htmlgraph/event_log.py +2 -0
  67. htmlgraph/hooks/.htmlgraph/.session-warning-state.json +6 -0
  68. htmlgraph/hooks/.htmlgraph/agents.json +72 -0
  69. htmlgraph/hooks/.htmlgraph/index.sqlite +0 -0
  70. htmlgraph/hooks/__init__.py +8 -0
  71. htmlgraph/hooks/bootstrap.py +169 -0
  72. htmlgraph/hooks/cigs_pretool_enforcer.py +2 -2
  73. htmlgraph/hooks/concurrent_sessions.py +208 -0
  74. htmlgraph/hooks/context.py +318 -0
  75. htmlgraph/hooks/drift_handler.py +525 -0
  76. htmlgraph/hooks/event_tracker.py +496 -79
  77. htmlgraph/hooks/orchestrator.py +6 -4
  78. htmlgraph/hooks/orchestrator_reflector.py +4 -4
  79. htmlgraph/hooks/post_tool_use_handler.py +257 -0
  80. htmlgraph/hooks/pretooluse.py +473 -6
  81. htmlgraph/hooks/prompt_analyzer.py +637 -0
  82. htmlgraph/hooks/session_handler.py +637 -0
  83. htmlgraph/hooks/state_manager.py +504 -0
  84. htmlgraph/hooks/subagent_stop.py +309 -0
  85. htmlgraph/hooks/task_enforcer.py +39 -0
  86. htmlgraph/hooks/validator.py +15 -11
  87. htmlgraph/models.py +111 -15
  88. htmlgraph/operations/fastapi_server.py +230 -0
  89. htmlgraph/orchestration/headless_spawner.py +344 -29
  90. htmlgraph/orchestration/live_events.py +377 -0
  91. htmlgraph/pydantic_models.py +476 -0
  92. htmlgraph/quality_gates.py +350 -0
  93. htmlgraph/repo_hash.py +511 -0
  94. htmlgraph/sdk.py +348 -10
  95. htmlgraph/server.py +194 -0
  96. htmlgraph/session_hooks.py +300 -0
  97. htmlgraph/session_manager.py +131 -1
  98. htmlgraph/session_registry.py +587 -0
  99. htmlgraph/session_state.py +436 -0
  100. htmlgraph/system_prompts.py +449 -0
  101. htmlgraph/templates/orchestration-view.html +350 -0
  102. htmlgraph/track_builder.py +19 -0
  103. htmlgraph/validation.py +115 -0
  104. htmlgraph-0.26.1.data/data/htmlgraph/dashboard.html +7458 -0
  105. {htmlgraph-0.24.2.dist-info → htmlgraph-0.26.1.dist-info}/METADATA +91 -64
  106. {htmlgraph-0.24.2.dist-info → htmlgraph-0.26.1.dist-info}/RECORD +112 -46
  107. {htmlgraph-0.24.2.data → htmlgraph-0.26.1.data}/data/htmlgraph/styles.css +0 -0
  108. {htmlgraph-0.24.2.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
  109. {htmlgraph-0.24.2.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
  110. {htmlgraph-0.24.2.data → htmlgraph-0.26.1.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
  111. {htmlgraph-0.24.2.dist-info → htmlgraph-0.26.1.dist-info}/WHEEL +0 -0
  112. {htmlgraph-0.24.2.dist-info → htmlgraph-0.26.1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,504 @@
1
+ """
2
+ HtmlGraph Hooks State Manager
3
+
4
+ Unified state file management for hook operations:
5
+ - Parent activity tracking (for Skill/Task context)
6
+ - User query event tracking (for parent-child linking)
7
+ - Drift queue management (for auto-classification)
8
+
9
+ This module provides file-based state persistence with:
10
+ - Atomic writes (write to temp, then rename)
11
+ - File locking to prevent concurrent writes
12
+ - Error handling for missing/corrupted files
13
+ - Age-based filtering and cleanup
14
+ - Comprehensive logging
15
+
16
+ File Locations (.htmlgraph/):
17
+ - parent-activity.json: Current parent context (Skill/Task invocation)
18
+ - user-query-event-{SESSION_ID}.json: UserQuery event ID for session
19
+ - drift-queue.json: Classification queue for high-drift activities
20
+ """
21
+
22
+ import json
23
+ import logging
24
+ import os
25
+ import tempfile
26
+ from datetime import datetime, timedelta
27
+ from pathlib import Path
28
+ from typing import Any
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
+
33
class ParentActivityTracker:
    """
    Tracks the active parent activity context for Skill/Task invocations.

    Parent context allows child tool calls to link to their parent Skill/Task.
    Parent activities automatically expire after 5 minutes of inactivity.

    File: parent-activity.json (single entry)
    ```json
    {
        "parent_id": "evt-xyz123",
        "tool": "Task",
        "timestamp": "2025-01-10T12:34:56Z"
    }
    ```
    """

    def __init__(self, graph_dir: Path):
        """
        Initialize parent activity tracker.

        Args:
            graph_dir: Path to .htmlgraph directory
        """
        self.graph_dir = Path(graph_dir)
        self.file_path = self.graph_dir / "parent-activity.json"
        self._ensure_graph_dir()

    def _ensure_graph_dir(self) -> None:
        """Ensure .htmlgraph directory exists."""
        self.graph_dir.mkdir(parents=True, exist_ok=True)

    def load(self, max_age_minutes: int = 5) -> dict[str, Any]:
        """
        Load parent activity state.

        Automatically filters out stale parent activities older than max_age_minutes.
        This allows long-running parent contexts (like Tasks) to timeout naturally.

        Args:
            max_age_minutes: Maximum age in minutes before activity is considered stale
                (default: 5 minutes)

        Returns:
            Parent activity dict with keys: parent_id, tool, timestamp.
            Empty dict if file missing, stale, or unreadable.
        """
        if not self.file_path.exists():
            return {}

        try:
            with open(self.file_path) as f:
                data: dict[str, object] = json.load(f)

            # Validate timestamp and check if stale
            if data.get("timestamp"):
                ts = datetime.fromisoformat(data["timestamp"])  # type: ignore[arg-type]
                age = datetime.now() - ts
                if age > timedelta(minutes=max_age_minutes):
                    logger.debug(
                        f"Parent activity stale ({age.total_seconds():.0f}s > {max_age_minutes}min)"
                    )
                    return {}

            logger.debug(f"Loaded parent activity: {data.get('parent_id')}")
            return data  # type: ignore[return-value]

        except json.JSONDecodeError:
            logger.warning("Corrupted parent-activity.json, returning empty state")
            return {}
        except (ValueError, KeyError, OSError) as e:
            logger.warning(f"Error loading parent activity: {e}")
            return {}

    def save(self, parent_id: str, tool: str) -> None:
        """
        Save parent activity context.

        Creates or updates parent-activity.json with the current parent context.
        Uses atomic write (temp file + os.replace) so concurrent readers never
        observe a partially-written file.

        Bug fix: the temp file (created with delete=False) is now removed in a
        finally block if the dump or rename fails, so failed saves no longer
        leak orphaned *.json files into the graph directory.

        Args:
            parent_id: Event ID of parent activity (e.g., "evt-xyz123")
            tool: Tool name that created parent context (e.g., "Task", "Skill")
        """
        tmp_path: str | None = None
        try:
            data = {
                "parent_id": parent_id,
                "tool": tool,
                "timestamp": datetime.now().isoformat(),
            }

            # Atomic write: write to temp file, then rename
            with tempfile.NamedTemporaryFile(
                mode="w",
                dir=self.graph_dir,
                delete=False,
                suffix=".json",
            ) as tmp:
                tmp_path = tmp.name
                json.dump(data, tmp)

            # Atomic rename
            os.replace(tmp_path, self.file_path)
            tmp_path = None  # rename succeeded; nothing left to clean up
            logger.debug(f"Saved parent activity: {parent_id} (tool={tool})")

        except OSError as e:
            logger.warning(f"Could not save parent activity: {e}")
        except Exception as e:
            logger.error(f"Unexpected error saving parent activity: {e}")
        finally:
            # Remove orphaned temp file left behind by a failed write.
            if tmp_path is not None:
                try:
                    os.unlink(tmp_path)
                except OSError:
                    pass

    def clear(self) -> None:
        """
        Delete parent activity file.

        Clears the parent context, causing subsequent tool calls to not link
        to a parent activity.
        """
        try:
            self.file_path.unlink(missing_ok=True)
            logger.debug("Cleared parent activity")
        except OSError as e:
            logger.warning(f"Could not clear parent activity: {e}")
157
+
158
+ class UserQueryEventTracker:
159
+ """
160
+ Tracks the active UserQuery event ID for parent-child linking.
161
+
162
+ Each session maintains its own UserQuery event context to support
163
+ multiple concurrent Claude windows in the same project.
164
+
165
+ UserQuery events expire after 2 minutes (conversation turn boundary),
166
+ allowing natural grouping of tool calls by conversation turn.
167
+
168
+ File: user-query-event-{SESSION_ID}.json (single entry)
169
+ ```json
170
+ {
171
+ "event_id": "evt-abc456",
172
+ "timestamp": "2025-01-10T12:34:56Z"
173
+ }
174
+ ```
175
+ """
176
+
177
+ def __init__(self, graph_dir: Path):
178
+ """
179
+ Initialize user query event tracker.
180
+
181
+ Args:
182
+ graph_dir: Path to .htmlgraph directory
183
+ """
184
+ self.graph_dir = Path(graph_dir)
185
+ self._ensure_graph_dir()
186
+
187
+ def _ensure_graph_dir(self) -> None:
188
+ """Ensure .htmlgraph directory exists."""
189
+ self.graph_dir.mkdir(parents=True, exist_ok=True)
190
+
191
+ def _get_file_path(self, session_id: str) -> Path:
192
+ """Get session-specific user query event file path."""
193
+ return self.graph_dir / f"user-query-event-{session_id}.json"
194
+
195
+ def load(self, session_id: str, max_age_minutes: int = 2) -> str | None:
196
+ """
197
+ Load active UserQuery event ID for a session.
198
+
199
+ Automatically filters out stale events older than max_age_minutes.
200
+ This creates natural conversation turn boundaries when queries timeout.
201
+
202
+ Args:
203
+ session_id: Session ID (e.g., "sess-xyz789")
204
+ max_age_minutes: Maximum age in minutes before event is considered stale
205
+ (default: 2 minutes for conversation turns)
206
+
207
+ Returns:
208
+ Event ID string (e.g., "evt-abc456") or None if missing/stale
209
+ """
210
+ file_path = self._get_file_path(session_id)
211
+ if not file_path.exists():
212
+ return None
213
+
214
+ try:
215
+ with open(file_path) as f:
216
+ data: dict[str, object] = json.load(f)
217
+
218
+ # Validate timestamp and check if stale
219
+ if data.get("timestamp"):
220
+ ts = datetime.fromisoformat(data["timestamp"]) # type: ignore[arg-type]
221
+ age = datetime.now() - ts
222
+ if age > timedelta(minutes=max_age_minutes):
223
+ logger.debug(
224
+ f"UserQuery event stale ({age.total_seconds():.0f}s > {max_age_minutes}min)"
225
+ )
226
+ return None
227
+
228
+ event_id = data.get("event_id")
229
+ logger.debug(f"Loaded UserQuery event: {event_id}")
230
+ return event_id # type: ignore[return-value]
231
+
232
+ except json.JSONDecodeError:
233
+ logger.warning(f"Corrupted user-query-event file for {session_id}")
234
+ return None
235
+ except (ValueError, KeyError, OSError) as e:
236
+ logger.warning(f"Error loading UserQuery event for {session_id}: {e}")
237
+ return None
238
+
239
+ def save(self, session_id: str, event_id: str) -> None:
240
+ """
241
+ Save UserQuery event ID for a session.
242
+
243
+ Creates or updates the session-specific user query event file.
244
+ Uses atomic write to prevent corruption from concurrent access.
245
+
246
+ Args:
247
+ session_id: Session ID (e.g., "sess-xyz789")
248
+ event_id: Event ID to save (e.g., "evt-abc456")
249
+ """
250
+ file_path = self._get_file_path(session_id)
251
+ try:
252
+ data = {
253
+ "event_id": event_id,
254
+ "timestamp": datetime.now().isoformat(),
255
+ }
256
+
257
+ # Atomic write: write to temp file, then rename
258
+ with tempfile.NamedTemporaryFile(
259
+ mode="w",
260
+ dir=self.graph_dir,
261
+ delete=False,
262
+ suffix=".json",
263
+ ) as tmp:
264
+ json.dump(data, tmp)
265
+ tmp_path = tmp.name
266
+
267
+ # Atomic rename
268
+ os.replace(tmp_path, file_path)
269
+ logger.debug(f"Saved UserQuery event: {event_id} (session={session_id})")
270
+
271
+ except OSError as e:
272
+ logger.warning(f"Could not save UserQuery event for {session_id}: {e}")
273
+ except Exception as e:
274
+ logger.error(
275
+ f"Unexpected error saving UserQuery event for {session_id}: {e}"
276
+ )
277
+
278
+ def clear(self, session_id: str) -> None:
279
+ """
280
+ Delete UserQuery event file for a session.
281
+
282
+ Clears the session's UserQuery context, allowing a new conversation turn
283
+ to begin without inheriting the previous turn's parent context.
284
+
285
+ Args:
286
+ session_id: Session ID to clear
287
+ """
288
+ file_path = self._get_file_path(session_id)
289
+ try:
290
+ file_path.unlink(missing_ok=True)
291
+ logger.debug(f"Cleared UserQuery event for {session_id}")
292
+ except OSError as e:
293
+ logger.warning(f"Could not clear UserQuery event for {session_id}: {e}")
294
+
295
+
296
+ class DriftQueueManager:
297
+ """
298
+ Manages the drift classification queue for high-drift activities.
299
+
300
+ The drift queue accumulates activities that exceed the auto-classification
301
+ threshold, triggering classification when thresholds are met.
302
+
303
+ Activities are automatically filtered by age to prevent indefinite accumulation.
304
+
305
+ File: drift-queue.json
306
+ ```json
307
+ {
308
+ "activities": [
309
+ {
310
+ "timestamp": "2025-01-10T12:34:56Z",
311
+ "tool": "Read",
312
+ "summary": "Read: /path/to/file.py",
313
+ "file_paths": ["/path/to/file.py"],
314
+ "drift_score": 0.87,
315
+ "feature_id": "feat-xyz123"
316
+ }
317
+ ],
318
+ "last_classification": "2025-01-10T12:30:00Z"
319
+ }
320
+ ```
321
+ """
322
+
323
+ def __init__(self, graph_dir: Path):
324
+ """
325
+ Initialize drift queue manager.
326
+
327
+ Args:
328
+ graph_dir: Path to .htmlgraph directory
329
+ """
330
+ self.graph_dir = Path(graph_dir)
331
+ self.file_path = self.graph_dir / "drift-queue.json"
332
+ self._ensure_graph_dir()
333
+
334
+ def _ensure_graph_dir(self) -> None:
335
+ """Ensure .htmlgraph directory exists."""
336
+ self.graph_dir.mkdir(parents=True, exist_ok=True)
337
+
338
+ def load(self, max_age_hours: int = 48) -> dict[str, Any]:
339
+ """
340
+ Load drift queue and filter by age.
341
+
342
+ Automatically removes activities older than max_age_hours.
343
+ This prevents the queue from growing indefinitely over time.
344
+
345
+ Args:
346
+ max_age_hours: Maximum age in hours before activities are removed
347
+ (default: 48 hours)
348
+
349
+ Returns:
350
+ Queue dict with keys: activities (list), last_classification (timestamp)
351
+ Returns default empty queue if file missing
352
+ """
353
+ if not self.file_path.exists():
354
+ return {"activities": [], "last_classification": None}
355
+
356
+ try:
357
+ with open(self.file_path) as f:
358
+ queue: dict[str, object] = json.load(f)
359
+
360
+ # Filter out stale activities
361
+ cutoff_time = datetime.now() - timedelta(hours=max_age_hours)
362
+ original_count = len(queue.get("activities", [])) # type: ignore[arg-type]
363
+
364
+ fresh_activities = []
365
+ for activity in queue.get("activities", []): # type: ignore[attr-defined]
366
+ try:
367
+ activity_time = datetime.fromisoformat(
368
+ activity.get("timestamp", "")
369
+ )
370
+ if activity_time >= cutoff_time:
371
+ fresh_activities.append(activity)
372
+ except (ValueError, TypeError):
373
+ # Keep activities with invalid timestamps to avoid data loss
374
+ fresh_activities.append(activity)
375
+
376
+ # Update queue if we removed stale entries
377
+ if len(fresh_activities) < original_count:
378
+ queue["activities"] = fresh_activities
379
+ self.save(queue)
380
+ removed = original_count - len(fresh_activities)
381
+ logger.info(
382
+ f"Cleaned {removed} stale drift queue entries (older than {max_age_hours}h)"
383
+ )
384
+
385
+ logger.debug(
386
+ f"Loaded drift queue: {len(fresh_activities)} recent activities"
387
+ )
388
+ return queue
389
+
390
+ except json.JSONDecodeError:
391
+ logger.warning("Corrupted drift-queue.json, returning empty queue")
392
+ return {"activities": [], "last_classification": None}
393
+ except (ValueError, KeyError, OSError) as e:
394
+ logger.warning(f"Error loading drift queue: {e}")
395
+ return {"activities": [], "last_classification": None}
396
+
397
+ def save(self, queue: dict[str, Any]) -> None:
398
+ """
399
+ Save drift queue to file.
400
+
401
+ Persists the queue with all activities and classification metadata.
402
+ Uses atomic write to prevent corruption from concurrent access.
403
+
404
+ Args:
405
+ queue: Queue dict with activities and last_classification timestamp
406
+ """
407
+ try:
408
+ # Atomic write: write to temp file, then rename
409
+ with tempfile.NamedTemporaryFile(
410
+ mode="w",
411
+ dir=self.graph_dir,
412
+ delete=False,
413
+ suffix=".json",
414
+ ) as tmp:
415
+ json.dump(queue, tmp, indent=2, default=str)
416
+ tmp_path = tmp.name
417
+
418
+ # Atomic rename
419
+ os.replace(tmp_path, self.file_path)
420
+ logger.debug(
421
+ f"Saved drift queue: {len(queue.get('activities', []))} activities"
422
+ )
423
+
424
+ except OSError as e:
425
+ logger.warning(f"Could not save drift queue: {e}")
426
+ except Exception as e:
427
+ logger.error(f"Unexpected error saving drift queue: {e}")
428
+
429
+ def add_activity(
430
+ self, activity: dict[str, Any], timestamp: datetime | None = None
431
+ ) -> None:
432
+ """
433
+ Add activity to drift queue.
434
+
435
+ Appends a high-drift activity to the queue for later classification.
436
+ Timestamp defaults to current time if not provided.
437
+
438
+ Args:
439
+ activity: Activity dict with keys: tool, summary, file_paths, drift_score, feature_id
440
+ timestamp: Activity timestamp (defaults to now)
441
+ """
442
+ if timestamp is None:
443
+ timestamp = datetime.now()
444
+
445
+ queue = self.load()
446
+ queue["activities"].append(
447
+ {
448
+ "timestamp": timestamp.isoformat(),
449
+ "tool": activity.get("tool"),
450
+ "summary": activity.get("summary"),
451
+ "file_paths": activity.get("file_paths", []),
452
+ "drift_score": activity.get("drift_score"),
453
+ "feature_id": activity.get("feature_id"),
454
+ }
455
+ )
456
+ self.save(queue)
457
+ logger.debug(
458
+ f"Added activity to drift queue (drift_score={activity.get('drift_score')})"
459
+ )
460
+
461
+ def clear(self) -> None:
462
+ """
463
+ Delete drift queue file.
464
+
465
+ Removes the entire drift queue, typically after classification completes.
466
+ """
467
+ try:
468
+ self.file_path.unlink(missing_ok=True)
469
+ logger.debug("Cleared drift queue")
470
+ except OSError as e:
471
+ logger.warning(f"Could not clear drift queue: {e}")
472
+
473
+ def clear_activities(self) -> None:
474
+ """
475
+ Clear activities from queue while preserving last_classification timestamp.
476
+
477
+ Called after successful classification to remove processed activities
478
+ while keeping track of when the last classification occurred.
479
+ """
480
+ try:
481
+ queue = {
482
+ "activities": [],
483
+ "last_classification": datetime.now().isoformat(),
484
+ }
485
+
486
+ # Preserve existing last_classification if this file already exists
487
+ if self.file_path.exists():
488
+ try:
489
+ with open(self.file_path) as f:
490
+ existing = json.load(f)
491
+ if existing.get("last_classification"):
492
+ queue["last_classification"] = existing[
493
+ "last_classification"
494
+ ]
495
+ except Exception:
496
+ pass
497
+
498
+ self.save(queue)
499
+ logger.debug(
500
+ "Cleared drift queue activities (preserved classification timestamp)"
501
+ )
502
+
503
+ except Exception as e:
504
+ logger.error(f"Error clearing drift queue activities: {e}")