intercept-agent 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- intercept_agent-0.2.0.dist-info/METADATA +9 -0
- intercept_agent-0.2.0.dist-info/RECORD +24 -0
- intercept_agent-0.2.0.dist-info/WHEEL +4 -0
- intercept_agent-0.2.0.dist-info/entry_points.txt +2 -0
- posture_agent/__init__.py +0 -0
- posture_agent/collectors/__init__.py +0 -0
- posture_agent/collectors/ai_tools.py +79 -0
- posture_agent/collectors/base.py +24 -0
- posture_agent/collectors/dev_tools.py +59 -0
- posture_agent/collectors/extensions.py +144 -0
- posture_agent/collectors/ides.py +75 -0
- posture_agent/collectors/machine.py +63 -0
- posture_agent/collectors/package_managers.py +51 -0
- posture_agent/collectors/security.py +178 -0
- posture_agent/core/__init__.py +0 -0
- posture_agent/core/config.py +99 -0
- posture_agent/main.py +224 -0
- posture_agent/models/__init__.py +0 -0
- posture_agent/models/report.py +21 -0
- posture_agent/services/__init__.py +0 -0
- posture_agent/services/fingerprint.py +39 -0
- posture_agent/services/reporter.py +29 -0
- posture_agent/utils/__init__.py +0 -0
- posture_agent/utils/shell.py +38 -0
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: intercept-agent
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: Intercept Developer Posture Agent - collects developer environment data
|
|
5
|
+
Requires-Python: >=3.12
|
|
6
|
+
Requires-Dist: click>=8.1.0
|
|
7
|
+
Requires-Dist: httpx>=0.28.0
|
|
8
|
+
Requires-Dist: pydantic>=2.10.0
|
|
9
|
+
Requires-Dist: pyyaml>=6.0.0
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
posture_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
posture_agent/main.py,sha256=NCeCQulpZhXZ1ZjpvznrOHFqXuaA18qBvmWsOgw2CB4,7147
|
|
3
|
+
posture_agent/collectors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
posture_agent/collectors/ai_tools.py,sha256=Gs0GiUkNxLJedXu9exNDff_1TyaFJT0LoZ0z2nXohP4,2719
|
|
5
|
+
posture_agent/collectors/base.py,sha256=s939WaTB1eIyqEHNY8fV1yRTw-eFaiRyKdFzZmpe23U,477
|
|
6
|
+
posture_agent/collectors/dev_tools.py,sha256=V3xkde6W-UTA1PGypR24veFosFXlwg6YLVsSnRK7big,1914
|
|
7
|
+
posture_agent/collectors/extensions.py,sha256=oKNQF4Q_d7Y56USiGGDdSospAfqGy2zCN9sm_3P6MCg,5468
|
|
8
|
+
posture_agent/collectors/ides.py,sha256=tDdL6USDwRfo4r0M2EMJyCfHP6vedcTOvfMYupzoaJo,2799
|
|
9
|
+
posture_agent/collectors/machine.py,sha256=cxB9LBDd3a4t67hSFGpSDULOnuOr9jRo4IJd9Ia5BOw,1879
|
|
10
|
+
posture_agent/collectors/package_managers.py,sha256=bPaHOIpiP-EXGldTsc9AnK7PkkeBCnLZh8AIh_rWu4M,1662
|
|
11
|
+
posture_agent/collectors/security.py,sha256=ebAN5x_i8Wlq4wyA-9i7ilnQ-_8IgePImRczPHyI3jw,7809
|
|
12
|
+
posture_agent/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
13
|
+
posture_agent/core/config.py,sha256=Lk1WdmjOE7eblQxG-fdXLeJrkIeF7w8eHJ0m5IOeNgc,2721
|
|
14
|
+
posture_agent/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
15
|
+
posture_agent/models/report.py,sha256=cDpsK9DHVcL0df5RAlc_EnF24L_eIsiYA1JjfJAICU0,644
|
|
16
|
+
posture_agent/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
17
|
+
posture_agent/services/fingerprint.py,sha256=CZweetzmjtu8dLFW8TeN1BJHSKNCujH4qQ9_6ge93Pw,1450
|
|
18
|
+
posture_agent/services/reporter.py,sha256=Qyd03ts9lDul-JosSQqUTnQsSiRUQkKYveVa4kY1t48,1115
|
|
19
|
+
posture_agent/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
20
|
+
posture_agent/utils/shell.py,sha256=536rlhmaCbCwHjEfSokO5tlrkdEMbtCcIHfZ-f40KNw,1234
|
|
21
|
+
intercept_agent-0.2.0.dist-info/METADATA,sha256=oZsq1jVASBqaWkGxv01pmEVenbY79KT8SW93yspd7B0,282
|
|
22
|
+
intercept_agent-0.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
23
|
+
intercept_agent-0.2.0.dist-info/entry_points.txt,sha256=zQAYh4GBdBWnNuiyVJt5zNPZAMVD8OhUzO4hygsX3B8,59
|
|
24
|
+
intercept_agent-0.2.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""AI tools collector."""
|
|
2
|
+
|
|
3
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
4
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# Known AI extension IDs across editors
AI_EXTENSIONS = {
    "github.copilot",
    "github.copilot-chat",
    "sourcegraph.cody-ai",
    "continue.continue",
    "amazonwebservices.amazon-q-vscode",
    "saoudrizwan.claude-dev",
    "cursor.cursor-ai",
}

# CLI-based AI tools: (name, binary, version_flag)
AI_CLI_TOOLS = [
    ("Claude Code", "claude", "--version"),
    ("GitHub Copilot CLI", "github-copilot-cli", "--version"),
    ("Aider", "aider", "--version"),
    ("Open Interpreter", "interpreter", "--version"),
]


class AIToolsCollector(BaseCollector):
    """Collects AI coding tool information.

    Detects CLI-based AI assistants by resolving their binaries with
    ``which``, and AI-related editor extensions by intersecting the output
    of the ``code``/``cursor`` extension listing with a known set of IDs.
    """

    name = "ai_tools"

    async def collect(self) -> CollectorResult:
        errors: list[str] = []
        discovered: list[dict[str, str]] = []

        # CLI tools count as installed when `which` resolves the binary.
        for display_name, cmd, flag in AI_CLI_TOOLS:
            try:
                located = await run_command("which", cmd)
                if not located or not located.strip():
                    continue
                discovered.append({
                    "name": display_name,
                    "type": "cli",
                    "binary": cmd,
                    "version": await check_version(cmd, flag) or "",
                })
            except Exception as e:
                errors.append(f"{display_name}: {e}")

        # Editor extensions: compare installed extension IDs (lowercased)
        # against the known AI extension set.
        for editor_binary in ("code", "cursor"):
            try:
                located = await run_command("which", editor_binary)
                if not located or not located.strip():
                    continue

                listing = await run_command(editor_binary, "--list-extensions")
                if not listing:
                    continue

                installed = {
                    entry.strip().lower()
                    for entry in listing.strip().split("\n")
                    if entry.strip()
                }
                editor_label = "VS Code" if editor_binary == "code" else "Cursor"
                for ext_id in AI_EXTENSIONS:
                    if ext_id.lower() not in installed:
                        continue
                    discovered.append({
                        "name": ext_id,
                        "type": "extension",
                        "editor": editor_label,
                        # The CLI listing used here does not report versions.
                        "version": "",
                    })
            except Exception as e:
                errors.append(f"AI extensions ({editor_binary}): {e}")

        return CollectorResult(
            collector=self.name,
            data=discovered,
            errors=errors,
        )
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Base collector interface."""
|
|
2
|
+
|
|
3
|
+
from abc import ABC, abstractmethod
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class CollectorResult(BaseModel):
    """Result from a collector.

    Carries the producing collector's name, its collected payload, and any
    non-fatal errors that occurred while gathering data.
    """

    # Name of the collector that produced this result (matches BaseCollector.name).
    collector: str
    # Collected payload; shape varies per collector (list or dict of primitives).
    data: Any
    # Human-readable, non-fatal error messages accumulated during collection.
    # (pydantic copies this default per instance, so the mutable default is safe here.)
    errors: list[str] = []
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BaseCollector(ABC):
    """Base class for all collectors.

    Subclasses set ``name`` to a unique identifier and implement the async
    :meth:`collect` method, returning a :class:`CollectorResult`.
    """

    # Unique identifier for the collector; subclasses override this.
    name: str = "base"

    @abstractmethod
    async def collect(self) -> CollectorResult:
        """Collect data and return a result."""
        ...
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Developer tools collector."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
6
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# Tool definitions: (name, binary, version_flag)
DEV_TOOL_DEFINITIONS = [
    ("Git", "git", "--version"),
    ("Docker", "docker", "--version"),
    ("Node.js", "node", "--version"),
    ("Python", "python3", "--version"),
    ("Go", "go", "version"),
    ("Rust", "rustc", "--version"),
    ("Ruby", "ruby", "--version"),
    ("Java", "java", "--version"),
    ("Swift", "swift", "--version"),
    ("Make", "make", "--version"),
    ("CMake", "cmake", "--version"),
    ("Terraform", "terraform", "--version"),
    ("kubectl", "kubectl", "version --client"),
    ("Helm", "helm", "version --short"),
    ("AWS CLI", "aws", "--version"),
    ("gcloud", "gcloud", "--version"),
    ("Azure CLI", "az", "--version"),
]


class DevToolsCollector(BaseCollector):
    """Collects installed developer tools.

    Probes each known tool with ``which`` and, when present, records its
    version string and the fully resolved path of the binary.
    """

    name = "dev_tools"

    async def collect(self) -> CollectorResult:
        errors: list[str] = []
        detected: list[dict[str, str]] = []

        for display_name, cmd, flag in DEV_TOOL_DEFINITIONS:
            try:
                located = await run_command("which", cmd)
                if not located or not located.strip():
                    continue
                detected.append({
                    "name": display_name,
                    "binary": cmd,
                    "version": await check_version(cmd, flag) or "",
                    # realpath resolves symlinks (e.g. Homebrew shims) to
                    # the actual install location.
                    "path": os.path.realpath(located.strip()),
                })
            except Exception as e:
                errors.append(f"{display_name}: {e}")

        return CollectorResult(
            collector=self.name,
            data=detected,
            errors=errors,
        )
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
"""IDE extension collector."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
import zipfile
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
9
|
+
from posture_agent.utils.shell import run_command
|
|
10
|
+
|
|
11
|
+
# Map binary name to extensions directory
_EXT_DIRS = {
    "code": Path.home() / ".vscode" / "extensions",
    "cursor": Path.home() / ".cursor" / "extensions",
}


def _version_key(version: str) -> tuple:
    """Sort key for dotted version strings.

    Splits on common separators and compares numeric segments as integers so
    "10.0.0" ranks above "9.0.0" — a plain string comparison would get this
    wrong. Non-numeric segments sort after numeric ones, lexicographically.
    An empty version sorts below everything.
    """
    if not version:
        return ()
    key = []
    for segment in re.split(r"[.\-+_]", version):
        if segment.isdigit():
            key.append((0, int(segment), ""))
        else:
            key.append((1, 0, segment))
    return tuple(key)


class ExtensionCollector(BaseCollector):
    """Collects IDE extension information.

    Gathers VS Code and Cursor extensions (CLI first, filesystem fallback)
    plus JetBrains plugins read from the filesystem. Results are grouped
    by editor under the keys ``vscode``, ``cursor`` and ``jetbrains``.
    """

    name = "extensions"

    async def collect(self) -> CollectorResult:
        errors: list[str] = []
        extensions: dict[str, list[dict[str, str]]] = {}

        # VS Code extensions
        try:
            vscode_exts = await self._collect_vscode_extensions("code")
            if vscode_exts:
                extensions["vscode"] = vscode_exts
        except Exception as e:
            errors.append(f"VS Code extensions: {e}")

        # Cursor extensions
        try:
            cursor_exts = await self._collect_vscode_extensions("cursor")
            if cursor_exts:
                extensions["cursor"] = cursor_exts
        except Exception as e:
            errors.append(f"Cursor extensions: {e}")

        # JetBrains plugins
        try:
            jetbrains_plugins = await self._collect_jetbrains_plugins()
            if jetbrains_plugins:
                extensions["jetbrains"] = jetbrains_plugins
        except Exception as e:
            errors.append(f"JetBrains plugins: {e}")

        return CollectorResult(
            collector=self.name,
            data=extensions,
            errors=errors,
        )

    async def _collect_vscode_extensions(self, binary: str) -> list[dict[str, str]]:
        """Collect extensions for VS Code or Cursor via CLI or filesystem.

        Prefers the editor CLI (`--list-extensions --show-versions`); falls
        back to scanning the extensions directory when the CLI is absent.
        Returns a list of ``{"id": ..., "version": ...}`` dicts.
        """
        # Try CLI first
        which_result = await run_command("which", binary)
        if which_result and which_result.strip():
            result = await run_command(binary, "--list-extensions", "--show-versions")
            if result:
                exts = []
                for line in result.strip().split("\n"):
                    line = line.strip()
                    if not line:
                        continue
                    # CLI output format is "publisher.name@version".
                    if "@" in line:
                        ext_id, version = line.rsplit("@", 1)
                        exts.append({"id": ext_id, "version": version})
                    else:
                        exts.append({"id": line, "version": ""})
                return exts

        # Fall back to reading extensions directory
        ext_dir = _EXT_DIRS.get(binary)
        if not ext_dir or not ext_dir.exists():
            return []

        exts = []
        for entry in ext_dir.iterdir():
            if not entry.is_dir():
                continue
            pkg_json = entry / "package.json"
            if not pkg_json.exists():
                continue
            try:
                data = json.loads(pkg_json.read_text(encoding="utf-8", errors="replace"))
                publisher = data.get("publisher", "")
                name = data.get("name", "")
                version = data.get("version", "")
                if publisher and name:
                    exts.append({"id": f"{publisher}.{name}", "version": version})
            except (json.JSONDecodeError, OSError):
                continue
        # Deduplicate (multiple versions may be installed); keep the highest
        # version per extension ID using a numeric-aware comparison.
        seen: dict[str, str] = {}
        for ext in exts:
            ext_id = ext["id"].lower()
            if ext_id not in seen or _version_key(ext["version"]) > _version_key(seen[ext_id]):
                seen[ext_id] = ext["version"]
        return [{"id": ext_id, "version": ver} for ext_id, ver in sorted(seen.items())]

    async def _collect_jetbrains_plugins(self) -> list[dict[str, str]]:
        """Collect JetBrains IDE plugins from the filesystem (macOS layout)."""
        plugins: list[dict[str, str]] = []
        jetbrains_dir = Path.home() / "Library" / "Application Support" / "JetBrains"
        if not jetbrains_dir.exists():
            return plugins

        for ide_dir in jetbrains_dir.iterdir():
            if not ide_dir.is_dir():
                continue
            plugins_dir = ide_dir / "plugins"
            if plugins_dir.exists():
                for plugin_dir in plugins_dir.iterdir():
                    if plugin_dir.is_dir():
                        version = self._get_jetbrains_plugin_version(plugin_dir)
                        plugins.append({
                            "id": plugin_dir.name,
                            "ide": ide_dir.name,
                            "version": version,
                        })
        return plugins

    def _get_jetbrains_plugin_version(self, plugin_dir: Path) -> str:
        """Extract version from a JetBrains plugin's JAR META-INF/plugin.xml.

        Scans the plugin's ``lib`` JARs for the first descriptor containing a
        ``<version>`` element; returns "" when none is found.
        """
        lib_dir = plugin_dir / "lib"
        if not lib_dir.exists():
            return ""

        for jar_path in lib_dir.glob("*.jar"):
            try:
                with zipfile.ZipFile(jar_path) as zf:
                    if "META-INF/plugin.xml" not in zf.namelist():
                        continue
                    plugin_xml = zf.read("META-INF/plugin.xml").decode("utf-8", errors="replace")
                    match = re.search(r"<version>([^<]+)</version>", plugin_xml)
                    if match:
                        return match.group(1)
            except (zipfile.BadZipFile, OSError, KeyError):
                continue
        return ""
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"""IDE collector."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
6
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# IDE definitions: (name, binary, app_path, version_flag)
IDE_DEFINITIONS = [
    ("VS Code", "code", "/Applications/Visual Studio Code.app", "--version"),
    ("Cursor", "cursor", "/Applications/Cursor.app", "--version"),
    ("IntelliJ IDEA", "idea", "/Applications/IntelliJ IDEA.app", None),
    ("PyCharm", "pycharm", "/Applications/PyCharm.app", None),
    ("WebStorm", "webstorm", "/Applications/WebStorm.app", None),
    ("GoLand", "goland", "/Applications/GoLand.app", None),
    ("Xcode", None, "/Applications/Xcode.app", None),
    ("Vim", "vim", None, "--version"),
    ("Neovim", "nvim", None, "--version"),
    ("Emacs", "emacs", None, "--version"),
    ("Sublime Text", "subl", "/Applications/Sublime Text.app", "--version"),
    ("Zed", "zed", "/Applications/Zed.app", "--version"),
]


class IDECollector(BaseCollector):
    """Collects installed IDE information.

    An IDE counts as installed when its macOS app bundle exists or its CLI
    binary resolves on PATH. Version comes from the bundle's Info.plist
    when available, otherwise from the binary's version flag.
    """

    name = "ides"

    async def collect(self) -> CollectorResult:
        errors: list[str] = []
        found: list[dict[str, str]] = []

        for display_name, cmd, bundle, flag in IDE_DEFINITIONS:
            try:
                entry = await self._probe(display_name, cmd, bundle, flag)
                if entry is not None:
                    found.append(entry)
            except Exception as e:
                errors.append(f"{display_name}: {e}")

        return CollectorResult(
            collector=self.name,
            data=found,
            errors=errors,
        )

    async def _probe(self, display_name, cmd, bundle, flag):
        """Probe one IDE definition; return its entry dict or None if absent."""
        present = False
        version = ""

        # App-bundle check: read the marketing version from Info.plist.
        if bundle and Path(bundle).exists():
            present = True
            plist = Path(bundle) / "Contents" / "Info.plist"
            if plist.exists():
                out = await run_command(
                    "defaults", "read", str(plist), "CFBundleShortVersionString"
                )
                if out:
                    version = out.strip()

        # CLI check: fall back to the binary's own version flag when the
        # bundle did not yield a version.
        if cmd:
            located = await run_command("which", cmd)
            if located and located.strip():
                present = True
                if flag and not version:
                    reported = await check_version(cmd, flag)
                    if reported:
                        version = reported

        if not present:
            return None
        return {
            "name": display_name,
            "version": version,
            "binary": cmd or "",
        }
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"""Machine info collector."""
|
|
2
|
+
|
|
3
|
+
import platform
|
|
4
|
+
|
|
5
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
6
|
+
from posture_agent.utils.shell import run_command
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class MachineCollector(BaseCollector):
    """Collects machine hardware and OS information.

    Combines the :mod:`platform` module with macOS ``sysctl`` probes; each
    probe failure is recorded as a non-fatal error.
    """

    name = "machine"

    async def collect(self) -> CollectorResult:
        import os

        errors: list[str] = []

        # CPU brand string via sysctl.
        cpu_brand = ""
        try:
            out = await run_command("sysctl", "-n", "machdep.cpu.brand_string")
            if out:
                cpu_brand = out.strip()
        except Exception as e:
            errors.append(f"CPU brand: {e}")

        # Logical core count via sysctl.
        cpu_cores = 0
        try:
            out = await run_command("sysctl", "-n", "hw.ncpu")
            if out:
                cpu_cores = int(out.strip())
        except Exception as e:
            errors.append(f"CPU cores: {e}")

        # Physical memory, reported in GiB rounded to one decimal.
        memory_gb = 0
        try:
            out = await run_command("sysctl", "-n", "hw.memsize")
            if out:
                memory_gb = round(int(out.strip()) / (1024**3), 1)
        except Exception as e:
            errors.append(f"Memory: {e}")

        return CollectorResult(
            collector=self.name,
            data={
                "hostname": platform.node(),
                "username": os.environ.get("USER", ""),
                # This agent targets macOS, so the OS name is fixed; the
                # version falls back to the kernel release when mac_ver()
                # reports nothing.
                "os_name": "macOS",
                "os_version": platform.mac_ver()[0] or platform.release(),
                "architecture": platform.machine(),
                "cpu_brand": cpu_brand,
                "cpu_cores": cpu_cores,
                "memory_gb": memory_gb,
            },
            errors=errors,
        )
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""Package manager collector."""
|
|
2
|
+
|
|
3
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
4
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# Package manager definitions: (name, binary, version_flag)
PACKAGE_MANAGERS = [
    ("Homebrew", "brew", "--version"),
    ("npm", "npm", "--version"),
    ("pnpm", "pnpm", "--version"),
    ("yarn", "yarn", "--version"),
    ("pip", "pip3", "--version"),
    ("uv", "uv", "--version"),
    ("pipx", "pipx", "--version"),
    ("Cargo", "cargo", "--version"),
    ("Go Modules", "go", "version"),
    ("Composer", "composer", "--version"),
    ("Gem", "gem", "--version"),
    ("CocoaPods", "pod", "--version"),
    ("Swift PM", "swift", "package --version"),
]


class PackageManagerCollector(BaseCollector):
    """Collects installed package managers.

    A manager counts as installed when ``which`` resolves its binary; the
    version string is captured via the manager's version flag.
    """

    name = "package_managers"

    async def collect(self) -> CollectorResult:
        errors: list[str] = []
        detected: list[dict[str, str]] = []

        for display_name, cmd, flag in PACKAGE_MANAGERS:
            try:
                located = await run_command("which", cmd)
                if not located or not located.strip():
                    continue
                detected.append({
                    "name": display_name,
                    "binary": cmd,
                    "version": await check_version(cmd, flag) or "",
                })
            except Exception as e:
                errors.append(f"{display_name}: {e}")

        return CollectorResult(
            collector=self.name,
            data=detected,
            errors=errors,
        )
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"""Security practices collector."""
|
|
2
|
+
|
|
3
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
4
|
+
from posture_agent.utils.shell import run_command
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class SecurityCollector(BaseCollector):
    """Collects security configuration information.

    Gathers git signing/identity settings, SSH key and agent state, and
    macOS protections (FileVault, firewall, Gatekeeper). Every probe is
    best-effort: failures are appended to ``errors`` instead of raising.
    """

    name = "security"

    async def collect(self) -> CollectorResult:
        # Single import instead of the per-section imports used previously.
        from pathlib import Path

        errors: list[str] = []
        security: dict[str, object] = {}

        # Git commit signing
        try:
            gpg_sign = await run_command("git", "config", "--global", "commit.gpgsign")
            security["git_signing_enabled"] = gpg_sign is not None and gpg_sign.strip().lower() == "true"

            # git signs with openpgp ("gpg") when gpg.format is unset.
            sign_format = await run_command("git", "config", "--global", "gpg.format")
            security["git_signing_format"] = sign_format.strip() if sign_format else "gpg"
        except Exception as e:
            security["git_signing_enabled"] = False
            errors.append(f"Git signing: {e}")

        # Git user info
        try:
            git_name = await run_command("git", "config", "--global", "user.name")
            security["git_name"] = git_name.strip() if git_name else ""

            git_email = await run_command("git", "config", "--global", "user.email")
            security["git_email"] = git_email.strip() if git_email else ""
        except Exception as e:
            errors.append(f"Git user: {e}")

        # SSH keys - detect private keys by content, not just .pub files
        try:
            ssh_dir = Path.home() / ".ssh"
            ssh_keys: list[dict[str, str]] = []
            if ssh_dir.exists():
                for f in ssh_dir.iterdir():
                    if not f.is_file():
                        continue
                    # Skip known non-key files
                    if f.name in ("config", "known_hosts", "known_hosts.old", "authorized_keys", "environment"):
                        continue
                    if f.suffix in (".pub", ".old", ".bak", ".log"):
                        continue
                    # Check if file looks like a private key (PEM/OpenSSH header
                    # appears within the first 64 bytes).
                    try:
                        header = f.read_bytes()[:64].decode("utf-8", errors="ignore")
                        key_type = None
                        if "BEGIN OPENSSH PRIVATE KEY" in header:
                            key_type = "openssh"
                        elif "BEGIN RSA PRIVATE KEY" in header:
                            key_type = "rsa"
                        elif "BEGIN EC PRIVATE KEY" in header:
                            key_type = "ecdsa"
                        elif "BEGIN DSA PRIVATE KEY" in header:
                            key_type = "dsa"
                        if key_type:
                            # The OpenSSH container does not name the algorithm,
                            # so refine from the conventional filename.
                            algo = key_type
                            if "ed25519" in f.name:
                                algo = "ed25519"
                            elif "ecdsa" in f.name:
                                algo = "ecdsa"
                            elif "rsa" in f.name:
                                algo = "rsa"
                            elif "dsa" in f.name:
                                algo = "dsa"
                            ssh_keys.append({"name": f.name, "algorithm": algo})
                    except (OSError, PermissionError):
                        continue
            security["ssh_key_count"] = len(ssh_keys)
            security["ssh_keys"] = ssh_keys

            # Check SSH agent for loaded keys
            agent_output = await run_command("ssh-add", "-l")
            if agent_output and "no identities" not in agent_output.lower() and "error" not in agent_output.lower():
                loaded_keys = [
                    line.strip() for line in agent_output.strip().splitlines()
                    if line.strip() and not line.startswith("The agent")
                ]
                security["ssh_agent_keys"] = len(loaded_keys)
            else:
                security["ssh_agent_keys"] = 0
        except Exception as e:
            errors.append(f"SSH keys: {e}")

        # FileVault (disk encryption); `fdesetup status` prints
        # "FileVault is On." when enabled.
        try:
            fv_status = await run_command("fdesetup", "status")
            security["filevault_enabled"] = (
                fv_status is not None and "On" in fv_status
            )
        except Exception as e:
            security["filevault_enabled"] = False
            errors.append(f"FileVault: {e}")

        # Firewall
        try:
            fw_status = await run_command(
                "/usr/libexec/ApplicationFirewall/socketfilterfw", "--getglobalstate"
            )
            security["firewall_enabled"] = (
                fw_status is not None and "enabled" in fw_status.lower()
            )
        except Exception as e:
            security["firewall_enabled"] = False
            errors.append(f"Firewall: {e}")

        # Pre-commit hooks (global)
        try:
            hooks_path = await run_command("git", "config", "--global", "core.hooksPath")
            security["global_hooks_path"] = hooks_path.strip() if hooks_path else ""
        except Exception as e:
            errors.append(f"Hooks path: {e}")

        # Git credential helper (shows secure credential management)
        try:
            # Check effective value (system + global + local), not just global
            cred_helper = await run_command("git", "config", "credential.helper")
            security["credential_helper"] = cred_helper.strip() if cred_helper else ""
        except Exception as e:
            errors.append(f"Credential helper: {e}")

        # Allowed signers (SSH signing verification)
        try:
            allowed_signers = await run_command("git", "config", "--global", "gpg.ssh.allowedSignersFile")
            signers_path = allowed_signers.strip() if allowed_signers else ""
            security["allowed_signers_configured"] = bool(signers_path)
            if signers_path:
                # The configured path may use "~"; expand before checking.
                expanded = Path(signers_path).expanduser()
                security["allowed_signers_exists"] = expanded.exists()
            else:
                security["allowed_signers_exists"] = False
        except Exception as e:
            security["allowed_signers_configured"] = False
            errors.append(f"Allowed signers: {e}")

        # SSH agent type (1Password, Secretive, Apple Keychain, standard),
        # inferred from the SSH_AUTH_SOCK path.
        try:
            ssh_auth_sock = await run_command("printenv", "SSH_AUTH_SOCK")
            sock_path = ssh_auth_sock.strip() if ssh_auth_sock else ""
            lowered = sock_path.lower()
            # The lowercased check covers any capitalization, so no second
            # case-sensitive test is needed.
            if "1password" in lowered:
                security["ssh_agent_type"] = "1Password"
            elif "secretive" in lowered:
                security["ssh_agent_type"] = "Secretive"
            elif "com.apple.launchd" in sock_path:
                security["ssh_agent_type"] = "Apple Keychain"
            elif sock_path:
                security["ssh_agent_type"] = "standard"
            else:
                security["ssh_agent_type"] = "none"
        except Exception as e:
            security["ssh_agent_type"] = "unknown"
            errors.append(f"SSH agent type: {e}")

        # Gatekeeper status (macOS app security)
        try:
            gk_status = await run_command("spctl", "--status")
            security["gatekeeper_enabled"] = (
                gk_status is not None and "enabled" in gk_status.lower()
            )
        except Exception as e:
            security["gatekeeper_enabled"] = False
            errors.append(f"Gatekeeper: {e}")

        return CollectorResult(
            collector=self.name,
            data=security,
            errors=errors,
        )
|
|
File without changes
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""Application configuration loaded from YAML files with env overrides."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
from functools import lru_cache
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import yaml
|
|
8
|
+
from pydantic import BaseModel
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class AppConfig(BaseModel):
    """Application configuration."""

    # Human-readable application name.
    name: str = "Intercept Posture Agent"
    # Enables debug behavior when True.
    debug: bool = False
    # Logging level name (e.g. "DEBUG", "INFO").
    log_level: str = "INFO"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class APIConfig(BaseModel):
    """API connection configuration."""

    # Base URL of the API endpoint the agent talks to.
    url: str = "http://localhost:8000"
    # Request timeout in seconds.
    timeout: int = 30
    # Number of retry attempts for failed requests.
    retries: int = 3
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CollectorsConfig(BaseModel):
    """Collector enable/disable flags.

    Each flag toggles the collector of the same name; all are enabled
    by default.
    """

    machine: bool = True
    ides: bool = True
    extensions: bool = True
    ai_tools: bool = True
    dev_tools: bool = True
    security: bool = True
    package_managers: bool = True
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class ScheduleConfig(BaseModel):
    """Schedule configuration."""

    # Delay between collection runs, in seconds (default: hourly).
    interval_seconds: int = 3600
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class Settings:
    """Combined settings from YAML config and environment.

    Configuration is read from the first YAML file found among a fixed list
    of candidate locations (user config first, then per-environment project
    files), after which selected values are overridden from environment
    variables.
    """

    def __init__(self) -> None:
        self._load_config()

    def _load_config(self) -> None:
        """Load configuration from YAML file based on ENVIRONMENT variable."""
        env = os.getenv("ENVIRONMENT", "dev-local")
        user_config = Path.home() / ".config" / "intercept" / "agent.yaml"
        # First match wins: explicit user config beats project defaults.
        config_paths = [
            user_config,
            Path(f"config/{env}.yaml"),
            Path(f"../config/{env}.yaml"),
            Path(__file__).parent.parent.parent / "config" / f"{env}.yaml",
        ]

        config_data: dict = {}
        for config_path in config_paths:
            if config_path.exists():
                # Read as UTF-8 explicitly: YAML is UTF-8 by convention and
                # the platform default encoding may differ (e.g. on Windows).
                with open(config_path, encoding="utf-8") as f:
                    config_data = yaml.safe_load(f) or {}
                break

        self.app = AppConfig(**config_data.get("app", {}))
        self.api = APIConfig(**config_data.get("api", {}))
        self.collectors = CollectorsConfig(**config_data.get("collectors", {}))
        self.schedule = ScheduleConfig(**config_data.get("schedule", {}))

        # Environment variable overrides take precedence over file values.
        if api_url := os.environ.get("INTERCEPT_API_URL"):
            self.api = self.api.model_copy(update={"url": api_url})

    @property
    def agent_version(self) -> str:
        """Read version from VERSION file; fall back to "0.1.0" if absent."""
        version_path = Path(__file__).parent.parent.parent / "VERSION"
        if version_path.exists():
            return version_path.read_text(encoding="utf-8").strip()
        return "0.1.0"

    # Convenience properties mirroring nested config values.
    @property
    def debug(self) -> bool:
        return self.app.debug

    @property
    def log_level(self) -> str:
        return self.app.log_level
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@lru_cache
def get_settings() -> Settings:
    """Get cached settings instance.

    The lru_cache makes this a lazy singleton: the YAML config is read
    once per process, on first call.
    """
    return Settings()
|
posture_agent/main.py
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
"""Intercept Posture Agent CLI."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import sys
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
import click
|
|
10
|
+
|
|
11
|
+
from posture_agent.collectors.ai_tools import AIToolsCollector
|
|
12
|
+
from posture_agent.collectors.dev_tools import DevToolsCollector
|
|
13
|
+
from posture_agent.collectors.extensions import ExtensionCollector
|
|
14
|
+
from posture_agent.collectors.ides import IDECollector
|
|
15
|
+
from posture_agent.collectors.machine import MachineCollector
|
|
16
|
+
from posture_agent.collectors.package_managers import PackageManagerCollector
|
|
17
|
+
from posture_agent.collectors.security import SecurityCollector
|
|
18
|
+
from posture_agent.core.config import get_settings
|
|
19
|
+
from posture_agent.models.report import PostureReportPayload
|
|
20
|
+
from posture_agent.services.fingerprint import get_machine_fingerprint
|
|
21
|
+
from posture_agent.services.reporter import send_report
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# launchd job label; doubles as the plist filename stem.
PLIST_NAME = "com.hijacksecurity.intercept-agent"
# Per-user LaunchAgents location, so install/uninstall need no root privileges.
PLIST_PATH = Path.home() / "Library" / "LaunchAgents" / f"{PLIST_NAME}.plist"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
async def run_collection(report: bool = False) -> None:
    """Run all collectors and optionally report to API."""
    settings = get_settings()

    # Stable machine identity included in every report.
    fingerprint = await get_machine_fingerprint()

    # Table of (enabled-flag, collector class); instantiate only enabled ones.
    available = [
        (settings.collectors.machine, MachineCollector),
        (settings.collectors.ides, IDECollector),
        (settings.collectors.extensions, ExtensionCollector),
        (settings.collectors.ai_tools, AIToolsCollector),
        (settings.collectors.dev_tools, DevToolsCollector),
        (settings.collectors.security, SecurityCollector),
        (settings.collectors.package_managers, PackageManagerCollector),
    ]
    collectors = [factory() for enabled, factory in available if enabled]

    # Run concurrently; return_exceptions keeps one failure from sinking all.
    results = await asyncio.gather(
        *(collector.collect() for collector in collectors),
        return_exceptions=True,
    )

    # Assemble the payload: metadata first, then one section per collector.
    payload_data: dict = {
        "fingerprint": fingerprint,
        "agent_version": settings.agent_version,
        "collected_at": datetime.now(timezone.utc),
    }
    for outcome in results:
        if isinstance(outcome, Exception):
            click.echo(f"  Error: {outcome}", err=True)
            continue
        payload_data[outcome.collector] = outcome.data

    payload = PostureReportPayload(**payload_data)

    if not report:
        # Dry run: print the payload instead of sending it.
        click.echo(json.dumps(payload.model_dump(mode="json"), indent=2))
        return

    click.echo(f"Sending report to {settings.api.url}...")
    try:
        response = await send_report(payload, settings)
        click.echo(f"Report submitted: {response.get('id', 'ok')}")
    except Exception as e:
        click.echo(f"Failed to send report: {e}", err=True)
        sys.exit(1)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@click.group()
def cli() -> None:
    """Intercept Developer Posture Agent."""
    # Docstring doubles as the click help text; the body needs nothing else.
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@cli.command()
@click.option("--report", is_flag=True, help="Send report to API (default: dry-run to stdout)")
def collect(report: bool) -> None:
    """Collect developer environment data."""
    # Bridge click's synchronous entry point into the async pipeline.
    asyncio.run(run_collection(report=report))
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _write_default_config(config_file: Path, api_url: str, interval: int) -> None:
    """Write a default agent.yaml mirroring current settings, if absent."""
    if config_file.exists():
        return
    config_file.write_text(
        "api:\n"
        f"  url: \"{api_url}\"\n"
        "  timeout: 30\n"
        "  retries: 3\n"
        "collectors:\n"
        "  machine: true\n"
        "  ides: true\n"
        "  extensions: true\n"
        "  ai_tools: true\n"
        "  dev_tools: true\n"
        "  security: true\n"
        "  package_managers: true\n"
        "schedule:\n"
        f"  interval_seconds: {interval}\n"
    )
    click.echo(f"Config written to {config_file}")


def _build_plist(agent_bin: str, interval: int, stdout_log: str, stderr_log: str) -> str:
    """Render the launchd plist XML for the agent job."""
    return f"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>{PLIST_NAME}</string>
    <key>ProgramArguments</key>
    <array>
        <string>{agent_bin}</string>
        <string>collect</string>
        <string>--report</string>
    </array>
    <key>StartInterval</key>
    <integer>{interval}</integer>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <false/>
    <key>StandardOutPath</key>
    <string>{stdout_log}</string>
    <key>StandardErrorPath</key>
    <string>{stderr_log}</string>
</dict>
</plist>
"""


@cli.command()
def install() -> None:
    """Install launchd plist for hourly collection."""
    import shutil
    import subprocess

    settings = get_settings()
    interval = settings.schedule.interval_seconds

    # Ensure config directory and logs exist
    config_dir = Path.home() / ".config" / "intercept"
    logs_dir = config_dir / "logs"
    config_dir.mkdir(parents=True, exist_ok=True)
    logs_dir.mkdir(parents=True, exist_ok=True)

    _write_default_config(config_dir / "agent.yaml", settings.api.url, interval)

    # The plist must reference an absolute binary path.
    agent_bin = shutil.which("intercept-agent")
    if not agent_bin:
        click.echo("Error: intercept-agent not found in PATH. Install with: pip install -e .", err=True)
        sys.exit(1)

    stdout_log = str(logs_dir / "agent.stdout.log")
    stderr_log = str(logs_dir / "agent.stderr.log")

    # Write plist
    PLIST_PATH.parent.mkdir(parents=True, exist_ok=True)
    PLIST_PATH.write_text(_build_plist(agent_bin, interval, stdout_log, stderr_log))
    click.echo(f"Plist written to {PLIST_PATH}")

    # Unload any previous instance first: re-running install would otherwise
    # fail, because `launchctl load` with check=True errors on an
    # already-loaded job.
    subprocess.run(
        ["launchctl", "unload", str(PLIST_PATH)],
        check=False, capture_output=True,
    )
    subprocess.run(["launchctl", "load", str(PLIST_PATH)], check=True)
    click.echo("Agent installed and started.")
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
@cli.command()
def uninstall() -> None:
    """Uninstall launchd plist."""
    import subprocess

    # Guard clause: nothing to do when no plist was ever installed.
    if not PLIST_PATH.exists():
        click.echo("Agent not installed.")
        return

    # Best-effort unload (check=False): the job may not be loaded.
    subprocess.run(["launchctl", "unload", str(PLIST_PATH)], check=False)
    PLIST_PATH.unlink()
    click.echo("Agent uninstalled.")
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
@cli.command()
def status() -> None:
    """Show agent status."""
    import subprocess

    settings = get_settings()

    click.echo(f"Agent version: {settings.agent_version}")
    click.echo(f"API URL: {settings.api.url}")
    click.echo(f"Schedule: every {settings.schedule.interval_seconds}s")
    click.echo(f"Plist: {PLIST_PATH}")
    click.echo(f"Installed: {PLIST_PATH.exists()}")

    if PLIST_PATH.exists():
        result = subprocess.run(
            ["launchctl", "list", PLIST_NAME],
            capture_output=True, text=True,
        )
        if result.returncode == 0:
            # Plain string (was an f-string with no placeholder, ruff F541).
            click.echo("Status: loaded")
            # Surface only the interesting launchctl fields.
            for line in result.stdout.strip().split("\n"):
                if "PID" in line or "LastExitStatus" in line:
                    click.echo(f"  {line.strip()}")
        else:
            click.echo("Status: not loaded")

    # Check logs
    stdout_log = Path.home() / ".config" / "intercept" / "logs" / "agent.stdout.log"
    if stdout_log.exists():
        lines = stdout_log.read_text().strip().split("\n")
        if lines:
            click.echo(f"Last output: {lines[-1]}")
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
# Allow running the module directly (e.g. `python -m posture_agent.main`)
# in addition to the installed `intercept-agent` entry point.
if __name__ == "__main__":
    cli()
|
|
File without changes
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Pydantic models for the posture report payload."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PostureReportPayload(BaseModel):
    """The full report payload sent to the API.

    Collector sections default to empty containers, so a disabled or
    failed collector simply leaves its section empty.
    """

    fingerprint: str  # stable machine fingerprint (truncated sha256 hex)
    agent_version: str
    # Timezone-aware UTC timestamp. The previous default, datetime.utcnow,
    # is deprecated since Python 3.12 (the package requires >=3.12) and
    # produced naive datetimes, inconsistent with the aware
    # datetime.now(timezone.utc) value the caller in main.py supplies.
    collected_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    machine: dict[str, Any] = {}
    ides: list[Any] | dict[str, Any] = []
    extensions: dict[str, Any] = {}
    ai_tools: list[Any] | dict[str, Any] = []
    dev_tools: list[Any] | dict[str, Any] = []
    security: dict[str, Any] = {}
    package_managers: list[Any] | dict[str, Any] = []
|
|
File without changes
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Machine fingerprint generation."""
|
|
2
|
+
|
|
3
|
+
import hashlib
|
|
4
|
+
|
|
5
|
+
from posture_agent.utils.shell import run_command
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _fingerprint_hash(value: str) -> str:
    """Return the truncated (32-char) sha256 hex digest of *value*."""
    return hashlib.sha256(value.encode()).hexdigest()[:32]


def _ioreg_value(output: str, key: str) -> str | None:
    """Extract the quoted value for *key* from `ioreg` output.

    ioreg prints lines of the form `"Key" = "Value"`; splitting on double
    quotes leaves the value two tokens after the key token.
    """
    for line in output.split("\n"):
        if key not in line:
            continue
        parts = line.split('"')
        for i, part in enumerate(parts):
            if part == key and i + 2 < len(parts):
                return parts[i + 2]
    return None


async def get_machine_fingerprint() -> str:
    """Generate a stable machine fingerprint from IOPlatformUUID.

    Falls back to the platform serial number, then to a hostname/arch
    hash, so a (progressively less stable) fingerprint is always returned.
    """
    # Try IOPlatformUUID first (most reliable on macOS)
    result = await run_command("ioreg", "-rd1", "-c", "IOPlatformExpertDevice")
    if result and (uuid_val := _ioreg_value(result, "IOPlatformUUID")):
        return _fingerprint_hash(uuid_val)

    # Fallback: serial number
    serial = await run_command("ioreg", "-l", "-c", "IOPlatformExpertDevice")
    if serial and (serial_no := _ioreg_value(serial, "IOPlatformSerialNumber")):
        return _fingerprint_hash(serial_no)

    # Last resort: hostname-based (not stable across hostname changes)
    import platform

    return _fingerprint_hash(f"{platform.node()}-{platform.machine()}")
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""HTTP reporter for sending posture data to the API."""
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
|
|
5
|
+
from posture_agent.core.config import Settings
|
|
6
|
+
from posture_agent.models.report import PostureReportPayload
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
async def send_report(payload: PostureReportPayload, settings: Settings) -> dict:
    """Send posture report to the API with retry.

    Retries up to ``settings.api.retries`` times with exponential backoff
    (1s, 2s, 4s, ...). Returns the decoded JSON response on success and
    raises RuntimeError once every attempt has failed.
    """
    # Hoisted: the original re-ran `import asyncio` inside the retry loop.
    import asyncio

    url = f"{settings.api.url}/api/v1/core/posture/reports"

    last_error: Exception | None = None
    for attempt in range(settings.api.retries):
        try:
            # A fresh client per attempt keeps each try's connection state
            # isolated (no stale pooled connections after a failure).
            async with httpx.AsyncClient(timeout=settings.api.timeout) as client:
                response = await client.post(
                    url,
                    json=payload.model_dump(mode="json"),
                )
                response.raise_for_status()
                return response.json()
        # NOTE(review): httpx.TimeoutException already subclasses
        # httpx.HTTPError, so the second element is redundant but harmless.
        except (httpx.HTTPError, httpx.TimeoutException) as e:
            last_error = e
            if attempt < settings.api.retries - 1:
                await asyncio.sleep(2 ** attempt)  # 1s, 2s, 4s, ...

    raise RuntimeError(f"Failed to send report after {settings.api.retries} attempts: {last_error}")
|
|
File without changes
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""Shell utility functions for running subprocesses."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
async def run_command(*args: str, timeout: float = 10.0) -> str | None:
|
|
7
|
+
"""Run a command and return stdout, or None on failure."""
|
|
8
|
+
try:
|
|
9
|
+
proc = await asyncio.create_subprocess_exec(
|
|
10
|
+
*args,
|
|
11
|
+
stdout=asyncio.subprocess.PIPE,
|
|
12
|
+
stderr=asyncio.subprocess.PIPE,
|
|
13
|
+
)
|
|
14
|
+
stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=timeout)
|
|
15
|
+
if proc.returncode == 0 and stdout:
|
|
16
|
+
return stdout.decode("utf-8", errors="replace")
|
|
17
|
+
return None
|
|
18
|
+
except (asyncio.TimeoutError, FileNotFoundError, OSError):
|
|
19
|
+
return None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
async def check_version(binary: str, version_flag: str) -> str | None:
    """Get version string from a binary. Handles multi-word flags."""
    import re

    # Multi-word flags (e.g. "version --short") become separate argv items.
    output = await run_command(binary, *version_flag.split())
    if not output:
        return None

    # Only the first line of output matters for version detection.
    first_line = output.strip().split("\n")[0]

    # Prefer a dotted-number pattern; otherwise return a trimmed raw line.
    found = re.search(r"(\d+\.\d+[\.\d]*)", first_line)
    return found.group(1) if found else first_line[:50]
|