htmlgraph 0.24.2__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. htmlgraph/__init__.py +20 -1
  2. htmlgraph/agent_detection.py +26 -10
  3. htmlgraph/analytics/cross_session.py +4 -3
  4. htmlgraph/analytics/work_type.py +52 -16
  5. htmlgraph/analytics_index.py +51 -19
  6. htmlgraph/api/__init__.py +3 -0
  7. htmlgraph/api/main.py +2115 -0
  8. htmlgraph/api/static/htmx.min.js +1 -0
  9. htmlgraph/api/static/style-redesign.css +1344 -0
  10. htmlgraph/api/static/style.css +1079 -0
  11. htmlgraph/api/templates/dashboard-redesign.html +812 -0
  12. htmlgraph/api/templates/dashboard.html +783 -0
  13. htmlgraph/api/templates/partials/activity-feed-hierarchical.html +326 -0
  14. htmlgraph/api/templates/partials/activity-feed.html +570 -0
  15. htmlgraph/api/templates/partials/agents-redesign.html +317 -0
  16. htmlgraph/api/templates/partials/agents.html +317 -0
  17. htmlgraph/api/templates/partials/event-traces.html +373 -0
  18. htmlgraph/api/templates/partials/features-kanban-redesign.html +509 -0
  19. htmlgraph/api/templates/partials/features.html +509 -0
  20. htmlgraph/api/templates/partials/metrics-redesign.html +346 -0
  21. htmlgraph/api/templates/partials/metrics.html +346 -0
  22. htmlgraph/api/templates/partials/orchestration-redesign.html +443 -0
  23. htmlgraph/api/templates/partials/orchestration.html +163 -0
  24. htmlgraph/api/templates/partials/spawners.html +375 -0
  25. htmlgraph/atomic_ops.py +560 -0
  26. htmlgraph/builders/base.py +55 -1
  27. htmlgraph/builders/bug.py +17 -2
  28. htmlgraph/builders/chore.py +17 -2
  29. htmlgraph/builders/epic.py +17 -2
  30. htmlgraph/builders/feature.py +25 -2
  31. htmlgraph/builders/phase.py +17 -2
  32. htmlgraph/builders/spike.py +27 -2
  33. htmlgraph/builders/track.py +14 -0
  34. htmlgraph/cigs/__init__.py +4 -0
  35. htmlgraph/cigs/reporter.py +818 -0
  36. htmlgraph/cli.py +1427 -401
  37. htmlgraph/cli_commands/__init__.py +1 -0
  38. htmlgraph/cli_commands/feature.py +195 -0
  39. htmlgraph/cli_framework.py +115 -0
  40. htmlgraph/collections/__init__.py +2 -0
  41. htmlgraph/collections/base.py +21 -0
  42. htmlgraph/collections/session.py +189 -0
  43. htmlgraph/collections/spike.py +7 -1
  44. htmlgraph/collections/task_delegation.py +236 -0
  45. htmlgraph/collections/traces.py +482 -0
  46. htmlgraph/config.py +113 -0
  47. htmlgraph/converter.py +41 -0
  48. htmlgraph/cost_analysis/__init__.py +5 -0
  49. htmlgraph/cost_analysis/analyzer.py +438 -0
  50. htmlgraph/dashboard.html +3315 -492
  51. htmlgraph-0.24.2.data/data/htmlgraph/dashboard.html → htmlgraph/dashboard.html.backup +2246 -248
  52. htmlgraph/dashboard.html.bak +7181 -0
  53. htmlgraph/dashboard.html.bak2 +7231 -0
  54. htmlgraph/dashboard.html.bak3 +7232 -0
  55. htmlgraph/db/__init__.py +38 -0
  56. htmlgraph/db/queries.py +790 -0
  57. htmlgraph/db/schema.py +1334 -0
  58. htmlgraph/deploy.py +26 -27
  59. htmlgraph/docs/API_REFERENCE.md +841 -0
  60. htmlgraph/docs/HTTP_API.md +750 -0
  61. htmlgraph/docs/INTEGRATION_GUIDE.md +752 -0
  62. htmlgraph/docs/ORCHESTRATION_PATTERNS.md +710 -0
  63. htmlgraph/docs/README.md +533 -0
  64. htmlgraph/docs/version_check.py +3 -1
  65. htmlgraph/error_handler.py +544 -0
  66. htmlgraph/event_log.py +2 -0
  67. htmlgraph/hooks/__init__.py +8 -0
  68. htmlgraph/hooks/bootstrap.py +169 -0
  69. htmlgraph/hooks/context.py +271 -0
  70. htmlgraph/hooks/drift_handler.py +521 -0
  71. htmlgraph/hooks/event_tracker.py +405 -15
  72. htmlgraph/hooks/post_tool_use_handler.py +257 -0
  73. htmlgraph/hooks/pretooluse.py +476 -6
  74. htmlgraph/hooks/prompt_analyzer.py +648 -0
  75. htmlgraph/hooks/session_handler.py +583 -0
  76. htmlgraph/hooks/state_manager.py +501 -0
  77. htmlgraph/hooks/subagent_stop.py +309 -0
  78. htmlgraph/hooks/task_enforcer.py +39 -0
  79. htmlgraph/models.py +111 -15
  80. htmlgraph/operations/fastapi_server.py +230 -0
  81. htmlgraph/orchestration/headless_spawner.py +22 -14
  82. htmlgraph/pydantic_models.py +476 -0
  83. htmlgraph/quality_gates.py +350 -0
  84. htmlgraph/repo_hash.py +511 -0
  85. htmlgraph/sdk.py +348 -10
  86. htmlgraph/server.py +194 -0
  87. htmlgraph/session_hooks.py +300 -0
  88. htmlgraph/session_manager.py +131 -1
  89. htmlgraph/session_registry.py +587 -0
  90. htmlgraph/session_state.py +436 -0
  91. htmlgraph/system_prompts.py +449 -0
  92. htmlgraph/templates/orchestration-view.html +350 -0
  93. htmlgraph/track_builder.py +19 -0
  94. htmlgraph/validation.py +115 -0
  95. htmlgraph-0.25.0.data/data/htmlgraph/dashboard.html +7417 -0
  96. {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/METADATA +91 -64
  97. {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/RECORD +103 -42
  98. {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/styles.css +0 -0
  99. {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
  100. {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
  101. {htmlgraph-0.24.2.data → htmlgraph-0.25.0.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
  102. {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/WHEEL +0 -0
  103. {htmlgraph-0.24.2.dist-info → htmlgraph-0.25.0.dist-info}/entry_points.txt +0 -0
htmlgraph/hooks/state_manager.py (new file)
@@ -0,0 +1,501 @@
+ """
+ HtmlGraph Hooks State Manager
+
+ Unified state file management for hook operations:
+ - Parent activity tracking (for Skill/Task context)
+ - User query event tracking (for parent-child linking)
+ - Drift queue management (for auto-classification)
+
+ This module provides file-based state persistence with:
+ - Atomic writes (write to temp, then rename)
+ - File locking to prevent concurrent writes
+ - Error handling for missing/corrupted files
+ - Age-based filtering and cleanup
+ - Comprehensive logging
+
+ File Locations (.htmlgraph/):
+ - parent-activity.json: Current parent context (Skill/Task invocation)
+ - user-query-event-{SESSION_ID}.json: UserQuery event ID for session
+ - drift-queue.json: Classification queue for high-drift activities
+ """
+
+ import json
+ import logging
+ import os
+ import tempfile
+ from datetime import datetime, timedelta
+ from pathlib import Path
+
+ logger = logging.getLogger(__name__)
+
+
+ class ParentActivityTracker:
+     """
+     Tracks the active parent activity context for Skill/Task invocations.
+
+     Parent context allows child tool calls to link to their parent Skill/Task.
+     Parent activities automatically expire after 5 minutes of inactivity.
+
+     File: parent-activity.json (single entry)
+     ```json
+     {
+         "parent_id": "evt-xyz123",
+         "tool": "Task",
+         "timestamp": "2025-01-10T12:34:56Z"
+     }
+     ```
+     """
+
+     def __init__(self, graph_dir: Path):
+         """
+         Initialize parent activity tracker.
+
+         Args:
+             graph_dir: Path to .htmlgraph directory
+         """
+         self.graph_dir = Path(graph_dir)
+         self.file_path = self.graph_dir / "parent-activity.json"
+         self._ensure_graph_dir()
+
+     def _ensure_graph_dir(self) -> None:
+         """Ensure .htmlgraph directory exists."""
+         self.graph_dir.mkdir(parents=True, exist_ok=True)
+
+     def load(self, max_age_minutes: int = 5) -> dict:
+         """
+         Load parent activity state.
+
+         Automatically filters out stale parent activities older than max_age_minutes.
+         This allows long-running parent contexts (like Tasks) to timeout naturally.
+
+         Args:
+             max_age_minutes: Maximum age in minutes before activity is considered stale
+                 (default: 5 minutes)
+
+         Returns:
+             Parent activity dict with keys: parent_id, tool, timestamp
+             Empty dict if file missing or stale
+         """
+         if not self.file_path.exists():
+             return {}
+
+         try:
+             with open(self.file_path) as f:
+                 data: dict[str, object] = json.load(f)
+
+             # Validate timestamp and check if stale
+             if data.get("timestamp"):
+                 ts = datetime.fromisoformat(data["timestamp"])  # type: ignore[arg-type]
+                 age = datetime.now() - ts
+                 if age > timedelta(minutes=max_age_minutes):
+                     logger.debug(
+                         f"Parent activity stale ({age.total_seconds():.0f}s > {max_age_minutes}min)"
+                     )
+                     return {}
+
+             logger.debug(f"Loaded parent activity: {data.get('parent_id')}")
+             return data  # type: ignore[return-value]
+
+         except json.JSONDecodeError:
+             logger.warning("Corrupted parent-activity.json, returning empty state")
+             return {}
+         except (ValueError, KeyError, OSError) as e:
+             logger.warning(f"Error loading parent activity: {e}")
+             return {}
+
+     def save(self, parent_id: str, tool: str) -> None:
+         """
+         Save parent activity context.
+
+         Creates or updates parent-activity.json with the current parent context.
+         Uses atomic write to prevent corruption from concurrent access.
+
+         Args:
+             parent_id: Event ID of parent activity (e.g., "evt-xyz123")
+             tool: Tool name that created parent context (e.g., "Task", "Skill")
+         """
+         try:
+             data = {
+                 "parent_id": parent_id,
+                 "tool": tool,
+                 "timestamp": datetime.now().isoformat(),
+             }
+
+             # Atomic write: write to temp file, then rename
+             with tempfile.NamedTemporaryFile(
+                 mode="w",
+                 dir=self.graph_dir,
+                 delete=False,
+                 suffix=".json",
+             ) as tmp:
+                 json.dump(data, tmp)
+                 tmp_path = tmp.name
+
+             # Atomic rename
+             os.replace(tmp_path, self.file_path)
+             logger.debug(f"Saved parent activity: {parent_id} (tool={tool})")
+
+         except OSError as e:
+             logger.warning(f"Could not save parent activity: {e}")
+         except Exception as e:
+             logger.error(f"Unexpected error saving parent activity: {e}")
+
+     def clear(self) -> None:
+         """
+         Delete parent activity file.
+
+         Clears the parent context, causing subsequent tool calls to not link
+         to a parent activity.
+         """
+         try:
+             self.file_path.unlink(missing_ok=True)
+             logger.debug("Cleared parent activity")
+         except OSError as e:
+             logger.warning(f"Could not clear parent activity: {e}")
+
+
+ class UserQueryEventTracker:
+     """
+     Tracks the active UserQuery event ID for parent-child linking.
+
+     Each session maintains its own UserQuery event context to support
+     multiple concurrent Claude windows in the same project.
+
+     UserQuery events expire after 2 minutes (conversation turn boundary),
+     allowing natural grouping of tool calls by conversation turn.
+
+     File: user-query-event-{SESSION_ID}.json (single entry)
+     ```json
+     {
+         "event_id": "evt-abc456",
+         "timestamp": "2025-01-10T12:34:56Z"
+     }
+     ```
+     """
+
+     def __init__(self, graph_dir: Path):
+         """
+         Initialize user query event tracker.
+
+         Args:
+             graph_dir: Path to .htmlgraph directory
+         """
+         self.graph_dir = Path(graph_dir)
+         self._ensure_graph_dir()
+
+     def _ensure_graph_dir(self) -> None:
+         """Ensure .htmlgraph directory exists."""
+         self.graph_dir.mkdir(parents=True, exist_ok=True)
+
+     def _get_file_path(self, session_id: str) -> Path:
+         """Get session-specific user query event file path."""
+         return self.graph_dir / f"user-query-event-{session_id}.json"
+
+     def load(self, session_id: str, max_age_minutes: int = 2) -> str | None:
+         """
+         Load active UserQuery event ID for a session.
+
+         Automatically filters out stale events older than max_age_minutes.
+         This creates natural conversation turn boundaries when queries timeout.
+
+         Args:
+             session_id: Session ID (e.g., "sess-xyz789")
+             max_age_minutes: Maximum age in minutes before event is considered stale
+                 (default: 2 minutes for conversation turns)
+
+         Returns:
+             Event ID string (e.g., "evt-abc456") or None if missing/stale
+         """
+         file_path = self._get_file_path(session_id)
+         if not file_path.exists():
+             return None
+
+         try:
+             with open(file_path) as f:
+                 data: dict[str, object] = json.load(f)
+
+             # Validate timestamp and check if stale
+             if data.get("timestamp"):
+                 ts = datetime.fromisoformat(data["timestamp"])  # type: ignore[arg-type]
+                 age = datetime.now() - ts
+                 if age > timedelta(minutes=max_age_minutes):
+                     logger.debug(
+                         f"UserQuery event stale ({age.total_seconds():.0f}s > {max_age_minutes}min)"
+                     )
+                     return None
+
+             event_id = data.get("event_id")
+             logger.debug(f"Loaded UserQuery event: {event_id}")
+             return event_id  # type: ignore[return-value]
+
+         except json.JSONDecodeError:
+             logger.warning(f"Corrupted user-query-event file for {session_id}")
+             return None
+         except (ValueError, KeyError, OSError) as e:
+             logger.warning(f"Error loading UserQuery event for {session_id}: {e}")
+             return None
+
+     def save(self, session_id: str, event_id: str) -> None:
+         """
+         Save UserQuery event ID for a session.
+
+         Creates or updates the session-specific user query event file.
+         Uses atomic write to prevent corruption from concurrent access.
+
+         Args:
+             session_id: Session ID (e.g., "sess-xyz789")
+             event_id: Event ID to save (e.g., "evt-abc456")
+         """
+         file_path = self._get_file_path(session_id)
+         try:
+             data = {
+                 "event_id": event_id,
+                 "timestamp": datetime.now().isoformat(),
+             }
+
+             # Atomic write: write to temp file, then rename
+             with tempfile.NamedTemporaryFile(
+                 mode="w",
+                 dir=self.graph_dir,
+                 delete=False,
+                 suffix=".json",
+             ) as tmp:
+                 json.dump(data, tmp)
+                 tmp_path = tmp.name
+
+             # Atomic rename
+             os.replace(tmp_path, file_path)
+             logger.debug(f"Saved UserQuery event: {event_id} (session={session_id})")
+
+         except OSError as e:
+             logger.warning(f"Could not save UserQuery event for {session_id}: {e}")
+         except Exception as e:
+             logger.error(
+                 f"Unexpected error saving UserQuery event for {session_id}: {e}"
+             )
+
+     def clear(self, session_id: str) -> None:
+         """
+         Delete UserQuery event file for a session.
+
+         Clears the session's UserQuery context, allowing a new conversation turn
+         to begin without inheriting the previous turn's parent context.
+
+         Args:
+             session_id: Session ID to clear
+         """
+         file_path = self._get_file_path(session_id)
+         try:
+             file_path.unlink(missing_ok=True)
+             logger.debug(f"Cleared UserQuery event for {session_id}")
+         except OSError as e:
+             logger.warning(f"Could not clear UserQuery event for {session_id}: {e}")
+
+
+ class DriftQueueManager:
+     """
+     Manages the drift classification queue for high-drift activities.
+
+     The drift queue accumulates activities that exceed the auto-classification
+     threshold, triggering classification when thresholds are met.
+
+     Activities are automatically filtered by age to prevent indefinite accumulation.
+
+     File: drift-queue.json
+     ```json
+     {
+         "activities": [
+             {
+                 "timestamp": "2025-01-10T12:34:56Z",
+                 "tool": "Read",
+                 "summary": "Read: /path/to/file.py",
+                 "file_paths": ["/path/to/file.py"],
+                 "drift_score": 0.87,
+                 "feature_id": "feat-xyz123"
+             }
+         ],
+         "last_classification": "2025-01-10T12:30:00Z"
+     }
+     ```
+     """
+
+     def __init__(self, graph_dir: Path):
+         """
+         Initialize drift queue manager.
+
+         Args:
+             graph_dir: Path to .htmlgraph directory
+         """
+         self.graph_dir = Path(graph_dir)
+         self.file_path = self.graph_dir / "drift-queue.json"
+         self._ensure_graph_dir()
+
+     def _ensure_graph_dir(self) -> None:
+         """Ensure .htmlgraph directory exists."""
+         self.graph_dir.mkdir(parents=True, exist_ok=True)
+
+     def load(self, max_age_hours: int = 48) -> dict:
+         """
+         Load drift queue and filter by age.
+
+         Automatically removes activities older than max_age_hours.
+         This prevents the queue from growing indefinitely over time.
+
+         Args:
+             max_age_hours: Maximum age in hours before activities are removed
+                 (default: 48 hours)
+
+         Returns:
+             Queue dict with keys: activities (list), last_classification (timestamp)
+             Returns default empty queue if file missing
+         """
+         if not self.file_path.exists():
+             return {"activities": [], "last_classification": None}
+
+         try:
+             with open(self.file_path) as f:
+                 queue: dict[str, object] = json.load(f)
+
+             # Filter out stale activities
+             cutoff_time = datetime.now() - timedelta(hours=max_age_hours)
+             original_count = len(queue.get("activities", []))  # type: ignore[arg-type]
+
+             fresh_activities = []
+             for activity in queue.get("activities", []):  # type: ignore[attr-defined]
+                 try:
+                     activity_time = datetime.fromisoformat(
+                         activity.get("timestamp", "")
+                     )
+                     if activity_time >= cutoff_time:
+                         fresh_activities.append(activity)
+                 except (ValueError, TypeError):
+                     # Keep activities with invalid timestamps to avoid data loss
+                     fresh_activities.append(activity)
+
+             # Update queue if we removed stale entries
+             if len(fresh_activities) < original_count:
+                 queue["activities"] = fresh_activities
+                 self.save(queue)
+                 removed = original_count - len(fresh_activities)
+                 logger.info(
+                     f"Cleaned {removed} stale drift queue entries (older than {max_age_hours}h)"
+                 )
+
+             logger.debug(
+                 f"Loaded drift queue: {len(fresh_activities)} recent activities"
+             )
+             return queue
+
+         except json.JSONDecodeError:
+             logger.warning("Corrupted drift-queue.json, returning empty queue")
+             return {"activities": [], "last_classification": None}
+         except (ValueError, KeyError, OSError) as e:
+             logger.warning(f"Error loading drift queue: {e}")
+             return {"activities": [], "last_classification": None}
+
+     def save(self, queue: dict) -> None:
+         """
+         Save drift queue to file.
+
+         Persists the queue with all activities and classification metadata.
+         Uses atomic write to prevent corruption from concurrent access.
+
+         Args:
+             queue: Queue dict with activities and last_classification timestamp
+         """
+         try:
+             # Atomic write: write to temp file, then rename
+             with tempfile.NamedTemporaryFile(
+                 mode="w",
+                 dir=self.graph_dir,
+                 delete=False,
+                 suffix=".json",
+             ) as tmp:
+                 json.dump(queue, tmp, indent=2, default=str)
+                 tmp_path = tmp.name
+
+             # Atomic rename
+             os.replace(tmp_path, self.file_path)
+             logger.debug(
+                 f"Saved drift queue: {len(queue.get('activities', []))} activities"
+             )
+
+         except OSError as e:
+             logger.warning(f"Could not save drift queue: {e}")
+         except Exception as e:
+             logger.error(f"Unexpected error saving drift queue: {e}")
+
+     def add_activity(self, activity: dict, timestamp: datetime | None = None) -> None:
+         """
+         Add activity to drift queue.
+
+         Appends a high-drift activity to the queue for later classification.
+         Timestamp defaults to current time if not provided.
+
+         Args:
+             activity: Activity dict with keys: tool, summary, file_paths, drift_score, feature_id
+             timestamp: Activity timestamp (defaults to now)
+         """
+         if timestamp is None:
+             timestamp = datetime.now()
+
+         queue = self.load()
+         queue["activities"].append(
+             {
+                 "timestamp": timestamp.isoformat(),
+                 "tool": activity.get("tool"),
+                 "summary": activity.get("summary"),
+                 "file_paths": activity.get("file_paths", []),
+                 "drift_score": activity.get("drift_score"),
+                 "feature_id": activity.get("feature_id"),
+             }
+         )
+         self.save(queue)
+         logger.debug(
+             f"Added activity to drift queue (drift_score={activity.get('drift_score')})"
+         )
+
+     def clear(self) -> None:
+         """
+         Delete drift queue file.
+
+         Removes the entire drift queue, typically after classification completes.
+         """
+         try:
+             self.file_path.unlink(missing_ok=True)
+             logger.debug("Cleared drift queue")
+         except OSError as e:
+             logger.warning(f"Could not clear drift queue: {e}")
+
+     def clear_activities(self) -> None:
+         """
+         Clear activities from queue while preserving last_classification timestamp.
+
+         Called after successful classification to remove processed activities
+         while keeping track of when the last classification occurred.
+         """
+         try:
+             queue = {
+                 "activities": [],
+                 "last_classification": datetime.now().isoformat(),
+             }
+
+             # Preserve existing last_classification if this file already exists
+             if self.file_path.exists():
+                 try:
+                     with open(self.file_path) as f:
+                         existing = json.load(f)
+                         if existing.get("last_classification"):
+                             queue["last_classification"] = existing[
+                                 "last_classification"
+                             ]
+                 except Exception:
+                     pass
+
+             self.save(queue)
+             logger.debug(
+                 "Cleared drift queue activities (preserved classification timestamp)"
+             )
+
+         except Exception as e:
+             logger.error(f"Error clearing drift queue activities: {e}")