devloop 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. devloop/__init__.py +3 -0
  2. devloop/agents/__init__.py +33 -0
  3. devloop/agents/agent_health_monitor.py +105 -0
  4. devloop/agents/ci_monitor.py +237 -0
  5. devloop/agents/code_rabbit.py +248 -0
  6. devloop/agents/doc_lifecycle.py +374 -0
  7. devloop/agents/echo.py +24 -0
  8. devloop/agents/file_logger.py +46 -0
  9. devloop/agents/formatter.py +511 -0
  10. devloop/agents/git_commit_assistant.py +421 -0
  11. devloop/agents/linter.py +399 -0
  12. devloop/agents/performance_profiler.py +284 -0
  13. devloop/agents/security_scanner.py +322 -0
  14. devloop/agents/snyk.py +292 -0
  15. devloop/agents/test_runner.py +484 -0
  16. devloop/agents/type_checker.py +242 -0
  17. devloop/cli/__init__.py +1 -0
  18. devloop/cli/commands/__init__.py +1 -0
  19. devloop/cli/commands/custom_agents.py +144 -0
  20. devloop/cli/commands/feedback.py +161 -0
  21. devloop/cli/commands/summary.py +50 -0
  22. devloop/cli/main.py +430 -0
  23. devloop/cli/main_v1.py +144 -0
  24. devloop/collectors/__init__.py +17 -0
  25. devloop/collectors/base.py +55 -0
  26. devloop/collectors/filesystem.py +126 -0
  27. devloop/collectors/git.py +171 -0
  28. devloop/collectors/manager.py +159 -0
  29. devloop/collectors/process.py +221 -0
  30. devloop/collectors/system.py +195 -0
  31. devloop/core/__init__.py +21 -0
  32. devloop/core/agent.py +206 -0
  33. devloop/core/agent_template.py +498 -0
  34. devloop/core/amp_integration.py +166 -0
  35. devloop/core/auto_fix.py +224 -0
  36. devloop/core/config.py +272 -0
  37. devloop/core/context.py +0 -0
  38. devloop/core/context_store.py +530 -0
  39. devloop/core/contextual_feedback.py +311 -0
  40. devloop/core/custom_agent.py +439 -0
  41. devloop/core/debug_trace.py +289 -0
  42. devloop/core/event.py +105 -0
  43. devloop/core/event_store.py +316 -0
  44. devloop/core/feedback.py +311 -0
  45. devloop/core/learning.py +351 -0
  46. devloop/core/manager.py +219 -0
  47. devloop/core/performance.py +433 -0
  48. devloop/core/proactive_feedback.py +302 -0
  49. devloop/core/summary_formatter.py +159 -0
  50. devloop/core/summary_generator.py +275 -0
  51. devloop-0.2.0.dist-info/METADATA +705 -0
  52. devloop-0.2.0.dist-info/RECORD +55 -0
  53. devloop-0.2.0.dist-info/WHEEL +4 -0
  54. devloop-0.2.0.dist-info/entry_points.txt +3 -0
  55. devloop-0.2.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,374 @@
1
+ """Documentation Lifecycle Agent - manages documentation lifecycle."""
2
+
3
import re
import time
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

from devloop.core.agent import Agent, AgentResult
from devloop.core.event import Event
11
+
12
+
13
@dataclass
class DocLifecycleConfig:
    """Configuration for documentation lifecycle management.

    All fields have defaults; construct with keyword overrides, e.g.
    ``DocLifecycleConfig(mode="auto-fix", root_md_limit=5)``.
    """

    # "report-only" only reports findings; "auto-fix" also permits moves.
    mode: str = "report-only"  # or "auto-fix"
    # How often a scheduled scan should run, in seconds.
    scan_interval: int = 86400  # Daily (seconds)
    # File age (days) after which a "complete" document is called out as old.
    archival_age_days: int = 30
    # Maximum markdown files tolerated in the project root before flagging.
    root_md_limit: int = 10

    # Substrings whose presence marks a document as finished (archivable).
    completion_markers: List[str] = field(
        default_factory=lambda: [
            "COMPLETE ✅",
            "RESOLVED ✅",
            "Complete!",
            "Status: Complete",
        ]
    )

    # Filename prefixes that indicate a temporary / working document.
    temporary_prefixes: List[str] = field(
        default_factory=lambda: ["SESSION_", "FIX_", "THREADING_", "STATUS"]
    )

    # Relative directory that archived documents are suggested to move into.
    archive_dir: str = "docs/archive"
    # Whether root files not in keep_in_root should be suggested for docs/.
    enforce_docs_structure: bool = True
    # Whether similarly named files are grouped as potential duplicates.
    detect_duplicates: bool = True
    # Similarity cutoff for duplicate detection.
    # NOTE(review): not read anywhere in this module — confirm intended use.
    similarity_threshold: float = 0.5

    # Files allowed to stay in the project root (never flagged for docs/).
    keep_in_root: List[str] = field(
        default_factory=lambda: [
            "README.md",
            "CHANGELOG.md",
            "LICENSE",
            "LICENSE.md",
            "CONTRIBUTING.md",
            "CODE_OF_CONDUCT.md",
            "SECURITY.md",
            "CLAUDE.md",
            "CODING_RULES.md",
            "PUBLISHING_PLAN.md",
            "CI_QUALITY_COMMITMENT.md",
        ]
    )

    # Files that must never be suggested for archival, regardless of markers.
    never_archive: List[str] = field(
        default_factory=lambda: ["README.md", "CLAUDE.md", "CODING_RULES.md"]
    )
59
+
60
+
61
class DocLifecycleAgent(Agent):
    """Agent for managing documentation lifecycle.

    Scans project markdown files for lifecycle issues — stale "complete"
    documents, temporary working files, root-directory overflow, and likely
    duplicates — and reports them as finding dicts.  In ``auto-fix`` mode it
    can additionally archive completed documents.
    """

    def __init__(
        self,
        name: str = "doc-lifecycle",
        triggers: Optional[List[str]] = None,
        event_bus=None,
        config: Optional[Dict[str, Any]] = None,
    ):
        """Initialize the agent.

        Args:
            name: Agent name reported in results.
            triggers: Event patterns to subscribe to; defaults to markdown
                file create/modify events plus the daily schedule.
            event_bus: Event bus forwarded to the base ``Agent``.
            config: Optional dict of ``DocLifecycleConfig`` field overrides.
        """
        super().__init__(
            name=name,
            triggers=triggers
            or ["file:created:**.md", "file:modified:**.md", "schedule:daily"],
            event_bus=event_bus,
        )

        # An empty/missing override dict simply yields all defaults, so the
        # previous conditional construction was redundant.
        self.config = DocLifecycleConfig(**(config or {}))

        # All scanning is relative to the process working directory.
        self.project_root = Path.cwd()

    async def handle(self, event: Event) -> AgentResult:
        """Handle documentation lifecycle events.

        Runs a full documentation scan and returns an ``AgentResult`` whose
        ``data`` carries the findings and markdown-file counts.  Failures are
        reported via an unsuccessful result rather than raised.
        """
        start = time.monotonic()  # measure real scan duration
        try:
            # Scan all markdown files
            findings = await self.scan_documentation()

            return AgentResult(
                agent_name=self.name,
                success=True,
                duration=time.monotonic() - start,
                message=f"Documentation scan complete: {len(findings)} findings",
                data={
                    "findings": findings,
                    "total_md_files": self._count_md_files(),
                    "root_md_files": self._count_root_md_files(),
                },
            )
        except Exception as e:
            return AgentResult(
                agent_name=self.name,
                success=False,
                duration=time.monotonic() - start,
                message=f"Documentation scan failed: {str(e)}",
                error=str(e),
            )

    async def scan_documentation(self) -> List[Dict[str, Any]]:
        """Scan all documentation and return a list of finding dicts.

        Each finding has at least ``type``, ``severity``, ``category``,
        ``message`` and ``auto_fixable`` keys.
        """
        findings: List[Dict[str, Any]] = []

        # Find all markdown files
        md_files = self._find_markdown_files()

        # Check root directory overflow
        root_md_count = self._count_root_md_files()
        if root_md_count > self.config.root_md_limit:
            findings.append(
                {
                    "type": "documentation",
                    "severity": "info",
                    "category": "root_overflow",
                    "file": "(root directory)",
                    "message": f"Root directory has {root_md_count} markdown files (limit: {self.config.root_md_limit})",
                    "suggestion": "Consider moving reference docs to docs/ directory",
                    "auto_fixable": False,
                }
            )

        # Analyze each file
        for md_file in md_files:
            file_findings = await self._analyze_file(md_file)
            findings.extend(file_findings)

        # Detect duplicates
        if self.config.detect_duplicates:
            duplicates = self._detect_duplicate_docs(md_files)
            for dup_group in duplicates:
                findings.append(
                    {
                        "type": "documentation",
                        "severity": "info",
                        "category": "duplicates",
                        "files": [str(f) for f in dup_group],
                        "message": f"Found {len(dup_group)} similar documentation files",
                        "suggestion": f"Consider consolidating: {', '.join(f.name for f in dup_group)}",
                        "auto_fixable": False,
                    }
                )

        return findings

    def _find_markdown_files(self) -> List[Path]:
        """Find all markdown files in the project root and docs/ tree."""
        md_files: List[Path] = []

        # Root level markdown files
        md_files.extend(self.project_root.glob("*.md"))

        # docs/ directory if it exists (recursive)
        docs_dir = self.project_root / "docs"
        if docs_dir.exists():
            md_files.extend(docs_dir.rglob("*.md"))

        return md_files

    def _count_md_files(self) -> int:
        """Count all markdown files (root plus docs/)."""
        return len(self._find_markdown_files())

    def _count_root_md_files(self) -> int:
        """Count markdown files in the root directory only."""
        return len(list(self.project_root.glob("*.md")))

    async def _analyze_file(self, file_path: Path) -> List[Dict[str, Any]]:
        """Analyze a single markdown file for lifecycle patterns.

        Returns zero or more finding dicts; read/analysis failures become a
        warning-severity finding instead of raising (best-effort scan).
        """
        findings: List[Dict[str, Any]] = []

        # Skip if in never_archive list
        if file_path.name in self.config.never_archive:
            return findings

        try:
            # Explicit encoding avoids locale-dependent decode failures.
            content = file_path.read_text(encoding="utf-8")

            # Check for completion markers
            for marker in self.config.completion_markers:
                if marker in content:
                    # Check if file is old enough to archive
                    age_days = self._get_file_age_days(file_path)

                    suggestion = self._suggest_archive_location(file_path)
                    message = f"Document marked as complete: {marker}"

                    if age_days > self.config.archival_age_days:
                        message += f" (> {self.config.archival_age_days} days old)"

                    findings.append(
                        {
                            "type": "documentation",
                            "severity": "info",
                            "category": "archival",
                            "file": str(file_path),
                            "message": message,
                            "suggestion": suggestion,
                            "auto_fixable": True,
                            "age_days": age_days,
                        }
                    )
                    break  # Only report once per file

            # Check for temporary file patterns
            if self._is_temporary_file(file_path):
                findings.append(
                    {
                        "type": "documentation",
                        "severity": "info",
                        "category": "temporary",
                        "file": str(file_path),
                        "message": f"Temporary documentation file: {file_path.name}",
                        "suggestion": "Consider archiving or consolidating",
                        "auto_fixable": False,
                    }
                )

            # Check for date stamps like "**Date:** January 1, 2024"
            date_pattern = r"\*\*Date:\*\*\s+(\w+ \d+, \d{4})"
            dates = re.findall(date_pattern, content)
            if dates:
                findings.append(
                    {
                        "type": "documentation",
                        "severity": "info",
                        "category": "dated",
                        "file": str(file_path),
                        "message": f"Found date stamp: {dates[0]}",
                        "metadata": {"dates": dates},
                        "auto_fixable": False,
                    }
                )

            # Check if file should be in docs/ instead of root
            if (
                file_path.parent == self.project_root
                and file_path.name not in self.config.keep_in_root
            ):
                findings.append(
                    {
                        "type": "documentation",
                        "severity": "info",
                        "category": "location",
                        "file": str(file_path),
                        "message": f"File in root should possibly be in docs/: {file_path.name}",
                        "suggestion": self._suggest_docs_location(file_path),
                        "auto_fixable": False,
                    }
                )

        except Exception as e:
            # Best-effort: report the failure as a finding, keep scanning.
            findings.append(
                {
                    "type": "documentation",
                    "severity": "warning",
                    "category": "error",
                    "file": str(file_path),
                    "message": f"Failed to analyze: {e}",
                    "auto_fixable": False,
                }
            )

        return findings

    def _is_temporary_file(self, file_path: Path) -> bool:
        """Check if the filename matches a configured temporary prefix."""
        for prefix in self.config.temporary_prefixes:
            if file_path.name.startswith(prefix):
                return True
        return False

    def _get_file_age_days(self, file_path: Path) -> int:
        """Get file age in whole days, based on modification time."""
        mod_time = datetime.fromtimestamp(file_path.stat().st_mtime)
        age = datetime.now() - mod_time
        return age.days

    def _suggest_archive_location(self, file_path: Path) -> str:
        """Suggest where to archive a file (docs/archive/YYYY-MM/name)."""
        # Bucket by modification month; normalize name to kebab-case.
        mod_time = datetime.fromtimestamp(file_path.stat().st_mtime)
        archive_month = mod_time.strftime("%Y-%m")

        archive_path = (
            Path(self.config.archive_dir)
            / archive_month
            / file_path.name.lower().replace("_", "-")
        )
        return f"Archive to {archive_path}"

    def _suggest_docs_location(self, file_path: Path) -> str:
        """Suggest where to move a root file within docs/, by name pattern."""
        name_lower = file_path.name.lower()

        # Suggest location based on name patterns
        if any(x in name_lower for x in ["guide", "howto", "tutorial"]):
            return f"Move to docs/guides/{file_path.name}"
        elif any(x in name_lower for x in ["reference", "api", "schema", "spec"]):
            return f"Move to docs/reference/{file_path.name}"
        elif any(x in name_lower for x in ["contributing", "development"]):
            return f"Move to docs/contributing/{file_path.name}"
        else:
            return f"Move to docs/{file_path.name}"

    def _detect_duplicate_docs(self, md_files: List[Path]) -> List[List[Path]]:
        """Detect potentially duplicate documentation files.

        Files whose normalized stems (lowercased, version numbers and
        "complete"/"summary" suffixes stripped, underscores mapped to
        hyphens) collide are grouped together.
        """
        # Group by similar names (normalized)
        name_groups: Dict[str, List[Path]] = {}
        for f in md_files:
            normalized = f.stem.lower()
            normalized = re.sub(r"[_-]v\d+", "", normalized)  # version numbers
            normalized = re.sub(r"[_-]complete.*", "", normalized)
            normalized = re.sub(r"[_-]summary.*", "", normalized)
            normalized = normalized.replace("_", "-")
            name_groups.setdefault(normalized, []).append(f)

        # Return only groups containing more than one file
        return [group for group in name_groups.values() if len(group) > 1]

    async def auto_fix(self, finding: Dict[str, Any]) -> bool:
        """Automatically fix a documentation lifecycle issue.

        Only archival findings are fixable, and only in "auto-fix" mode.
        Returns True when the file was moved, False otherwise.
        """
        if self.config.mode != "auto-fix":
            return False

        if finding.get("category") == "archival" and finding.get("auto_fixable"):
            # Move file to archive
            source = Path(finding["file"])
            suggestion = finding["suggestion"]

            # Extract destination from suggestion text.
            # Format: "Archive to docs/archive/YYYY-MM/filename.md"
            match = re.search(r"Archive to (.+)$", suggestion)
            if match:
                dest = Path(match.group(1))
                dest.parent.mkdir(parents=True, exist_ok=True)

                try:
                    # rename (not replace) so an existing archived file is
                    # never silently clobbered.
                    source.rename(dest)
                    return True
                except Exception:
                    return False

        return False
371
+
372
+
373
# Export: the public API of this module.
__all__ = ["DocLifecycleAgent", "DocLifecycleConfig"]
devloop/agents/echo.py ADDED
@@ -0,0 +1,24 @@
1
+ """Echo agent - simply logs received events (for testing)."""
2
+
3
+ from devloop.core.agent import Agent, AgentResult
4
+ from devloop.core.event import Event
5
+
6
+
7
class EchoAgent(Agent):
    """Agent that echoes all events it receives."""

    async def handle(self, event: Event) -> AgentResult:
        """Echo the event."""
        payload = event.payload
        description = f"Received {event.type} from {event.source}"

        # File events additionally get their path appended, when present.
        is_file_event_with_path = "file" in event.type and "path" in payload
        if is_file_event_with_path:
            description = f"{description}: {payload['path']}"

        return AgentResult(
            agent_name=self.name,
            success=True,
            duration=0,
            message=description,
            data=payload,
        )
@@ -0,0 +1,46 @@
1
+ """File logger agent - logs file changes to a file."""
2
+
3
+ import json
4
+ from pathlib import Path
5
+
6
+ from devloop.core.agent import Agent, AgentResult
7
+ from devloop.core.event import Event
8
+
9
+
10
class FileLoggerAgent(Agent):
    """Agent that logs file changes to .devloop/file-changes.log"""

    def __init__(self, *args, **kwargs):
        """Create the agent and ensure the log directory exists."""
        super().__init__(*args, **kwargs)
        self.log_file = Path(".devloop/file-changes.log")
        # parents=True keeps this robust if the log path ever gains depth.
        self.log_file.parent.mkdir(parents=True, exist_ok=True)

    async def handle(self, event: Event) -> AgentResult:
        """Append a JSON line describing a ``file:*`` event to the log.

        Non-file events are acknowledged as a successful no-op.
        """
        # Only handle file events
        if not event.type.startswith("file:"):
            return AgentResult(
                agent_name=self.name,
                success=True,
                duration=0,
                message="Skipped non-file event",
            )

        # Create log entry
        log_entry = {
            "timestamp": event.timestamp,
            "event_type": event.type,
            "path": event.payload.get("path", "unknown"),
            "source": event.source,
        }

        # Append one JSON object per line (JSONL).  Explicit encoding keeps
        # the log consistent regardless of the platform's default locale.
        with open(self.log_file, "a", encoding="utf-8") as f:
            f.write(json.dumps(log_entry) + "\n")

        return AgentResult(
            agent_name=self.name,
            success=True,
            duration=0,
            message=f"Logged {event.type}: {event.payload.get('path', 'unknown')}",
        )