intercept-agent 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- intercept_agent-0.2.0/.gitignore +250 -0
- intercept_agent-0.2.0/PKG-INFO +9 -0
- intercept_agent-0.2.0/VERSION +1 -0
- intercept_agent-0.2.0/config/dev-docker.yaml +21 -0
- intercept_agent-0.2.0/config/dev-local.yaml +21 -0
- intercept_agent-0.2.0/config/prod.yaml +21 -0
- intercept_agent-0.2.0/config/test.yaml +21 -0
- intercept_agent-0.2.0/posture_agent/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/collectors/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/collectors/ai_tools.py +79 -0
- intercept_agent-0.2.0/posture_agent/collectors/base.py +24 -0
- intercept_agent-0.2.0/posture_agent/collectors/dev_tools.py +59 -0
- intercept_agent-0.2.0/posture_agent/collectors/extensions.py +144 -0
- intercept_agent-0.2.0/posture_agent/collectors/ides.py +75 -0
- intercept_agent-0.2.0/posture_agent/collectors/machine.py +63 -0
- intercept_agent-0.2.0/posture_agent/collectors/package_managers.py +51 -0
- intercept_agent-0.2.0/posture_agent/collectors/security.py +178 -0
- intercept_agent-0.2.0/posture_agent/core/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/core/config.py +99 -0
- intercept_agent-0.2.0/posture_agent/main.py +224 -0
- intercept_agent-0.2.0/posture_agent/models/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/models/report.py +21 -0
- intercept_agent-0.2.0/posture_agent/services/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/services/fingerprint.py +39 -0
- intercept_agent-0.2.0/posture_agent/services/reporter.py +29 -0
- intercept_agent-0.2.0/posture_agent/utils/__init__.py +0 -0
- intercept_agent-0.2.0/posture_agent/utils/shell.py +38 -0
- intercept_agent-0.2.0/pyproject.toml +25 -0
- intercept_agent-0.2.0/resources/com.hijacksecurity.intercept-agent.plist +24 -0
- intercept_agent-0.2.0/tests/__init__.py +0 -0
- intercept_agent-0.2.0/tests/test_cli.py +32 -0
- intercept_agent-0.2.0/tests/test_collectors.py +104 -0
- intercept_agent-0.2.0/tests/test_fingerprint.py +18 -0
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
# Custom
|
|
2
|
+
.idea/
|
|
3
|
+
.claude/
|
|
4
|
+
.playwright-mcp/
|
|
5
|
+
|
|
6
|
+
# Redis dump files
|
|
7
|
+
*.rdb
|
|
8
|
+
|
|
9
|
+
# UV lock file (each service manages its own dependencies)
|
|
10
|
+
uv.lock
|
|
11
|
+
|
|
12
|
+
# Local data directories (SQLite databases, cloned repos, reports, etc.)
|
|
13
|
+
**/data/*.db
|
|
14
|
+
**/data/*.db-journal
|
|
15
|
+
**/data/*.db-wal
|
|
16
|
+
**/data/*.db-shm
|
|
17
|
+
**/data/clones/
|
|
18
|
+
**/data/reports/
|
|
19
|
+
|
|
20
|
+
# Playwright
|
|
21
|
+
playwright-report/
|
|
22
|
+
test-results/
|
|
23
|
+
**/e2e/screenshots/*.png
|
|
24
|
+
|
|
25
|
+
# Node.js / Next.js
|
|
26
|
+
node_modules/
|
|
27
|
+
.pnp
|
|
28
|
+
.pnp.*
|
|
29
|
+
.yarn/*
|
|
30
|
+
!.yarn/patches
|
|
31
|
+
!.yarn/plugins
|
|
32
|
+
!.yarn/releases
|
|
33
|
+
!.yarn/versions
|
|
34
|
+
.next/
|
|
35
|
+
out/
|
|
36
|
+
*.tsbuildinfo
|
|
37
|
+
next-env.d.ts
|
|
38
|
+
.vercel
|
|
39
|
+
npm-debug.log*
|
|
40
|
+
yarn-debug.log*
|
|
41
|
+
yarn-error.log*
|
|
42
|
+
.pnpm-debug.log*
|
|
43
|
+
|
|
44
|
+
# Byte-compiled / optimized / DLL files
|
|
45
|
+
__pycache__/
|
|
46
|
+
*.py[codz]
|
|
47
|
+
*$py.class
|
|
48
|
+
|
|
49
|
+
# C extensions
|
|
50
|
+
*.so
|
|
51
|
+
|
|
52
|
+
# Distribution / packaging
|
|
53
|
+
.Python
|
|
54
|
+
build/
|
|
55
|
+
develop-eggs/
|
|
56
|
+
dist/
|
|
57
|
+
downloads/
|
|
58
|
+
eggs/
|
|
59
|
+
.eggs/
|
|
60
|
+
/lib/
|
|
61
|
+
/lib64/
|
|
62
|
+
parts/
|
|
63
|
+
sdist/
|
|
64
|
+
var/
|
|
65
|
+
wheels/
|
|
66
|
+
share/python-wheels/
|
|
67
|
+
*.egg-info/
|
|
68
|
+
.installed.cfg
|
|
69
|
+
*.egg
|
|
70
|
+
MANIFEST
|
|
71
|
+
|
|
72
|
+
# PyInstaller
|
|
73
|
+
# Usually these files are written by a python script from a template
|
|
74
|
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
75
|
+
*.manifest
|
|
76
|
+
*.spec
|
|
77
|
+
|
|
78
|
+
# Installer logs
|
|
79
|
+
pip-log.txt
|
|
80
|
+
pip-delete-this-directory.txt
|
|
81
|
+
|
|
82
|
+
# Unit test / coverage reports
|
|
83
|
+
htmlcov/
|
|
84
|
+
.tox/
|
|
85
|
+
.nox/
|
|
86
|
+
.coverage
|
|
87
|
+
.coverage.*
|
|
88
|
+
.cache
|
|
89
|
+
nosetests.xml
|
|
90
|
+
coverage.xml
|
|
91
|
+
*.cover
|
|
92
|
+
*.py.cover
|
|
93
|
+
.hypothesis/
|
|
94
|
+
.pytest_cache/
|
|
95
|
+
cover/
|
|
96
|
+
|
|
97
|
+
# Translations
|
|
98
|
+
*.mo
|
|
99
|
+
*.pot
|
|
100
|
+
|
|
101
|
+
# Django stuff:
|
|
102
|
+
*.log
|
|
103
|
+
local_settings.py
|
|
104
|
+
db.sqlite3
|
|
105
|
+
db.sqlite3-journal
|
|
106
|
+
|
|
107
|
+
# Flask stuff:
|
|
108
|
+
instance/
|
|
109
|
+
.webassets-cache
|
|
110
|
+
|
|
111
|
+
# Scrapy stuff:
|
|
112
|
+
.scrapy
|
|
113
|
+
|
|
114
|
+
# Sphinx documentation
|
|
115
|
+
docs/_build/
|
|
116
|
+
|
|
117
|
+
# PyBuilder
|
|
118
|
+
.pybuilder/
|
|
119
|
+
target/
|
|
120
|
+
|
|
121
|
+
# Jupyter Notebook
|
|
122
|
+
.ipynb_checkpoints
|
|
123
|
+
|
|
124
|
+
# IPython
|
|
125
|
+
profile_default/
|
|
126
|
+
ipython_config.py
|
|
127
|
+
|
|
128
|
+
# pyenv
|
|
129
|
+
# For a library or package, you might want to ignore these files since the code is
|
|
130
|
+
# intended to run in multiple environments; otherwise, check them in:
|
|
131
|
+
# .python-version
|
|
132
|
+
|
|
133
|
+
# pipenv
|
|
134
|
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
|
135
|
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
|
136
|
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
|
137
|
+
# install all needed dependencies.
|
|
138
|
+
#Pipfile.lock
|
|
139
|
+
|
|
140
|
+
# UV
|
|
141
|
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
|
142
|
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
|
143
|
+
# commonly ignored for libraries.
|
|
144
|
+
#uv.lock
|
|
145
|
+
|
|
146
|
+
# poetry
|
|
147
|
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
|
148
|
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
|
149
|
+
# commonly ignored for libraries.
|
|
150
|
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
|
151
|
+
#poetry.lock
|
|
152
|
+
#poetry.toml
|
|
153
|
+
|
|
154
|
+
# pdm
|
|
155
|
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
|
156
|
+
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
|
157
|
+
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
|
158
|
+
#pdm.lock
|
|
159
|
+
#pdm.toml
|
|
160
|
+
.pdm-python
|
|
161
|
+
.pdm-build/
|
|
162
|
+
|
|
163
|
+
# pixi
|
|
164
|
+
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
|
165
|
+
#pixi.lock
|
|
166
|
+
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
|
167
|
+
# in the .venv directory. It is recommended not to include this directory in version control.
|
|
168
|
+
.pixi
|
|
169
|
+
|
|
170
|
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
|
171
|
+
__pypackages__/
|
|
172
|
+
|
|
173
|
+
# Celery stuff
|
|
174
|
+
celerybeat-schedule
|
|
175
|
+
celerybeat.pid
|
|
176
|
+
|
|
177
|
+
# SageMath parsed files
|
|
178
|
+
*.sage.py
|
|
179
|
+
|
|
180
|
+
# Environments
|
|
181
|
+
.env
|
|
182
|
+
.envrc
|
|
183
|
+
.venv
|
|
184
|
+
env/
|
|
185
|
+
venv/
|
|
186
|
+
ENV/
|
|
187
|
+
env.bak/
|
|
188
|
+
venv.bak/
|
|
189
|
+
|
|
190
|
+
# Spyder project settings
|
|
191
|
+
.spyderproject
|
|
192
|
+
.spyproject
|
|
193
|
+
|
|
194
|
+
# Rope project settings
|
|
195
|
+
.ropeproject
|
|
196
|
+
|
|
197
|
+
# mkdocs documentation
|
|
198
|
+
/site
|
|
199
|
+
|
|
200
|
+
# mypy
|
|
201
|
+
.mypy_cache/
|
|
202
|
+
.dmypy.json
|
|
203
|
+
dmypy.json
|
|
204
|
+
|
|
205
|
+
# Pyre type checker
|
|
206
|
+
.pyre/
|
|
207
|
+
|
|
208
|
+
# pytype static type analyzer
|
|
209
|
+
.pytype/
|
|
210
|
+
|
|
211
|
+
# Cython debug symbols
|
|
212
|
+
cython_debug/
|
|
213
|
+
|
|
214
|
+
# PyCharm
|
|
215
|
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
|
216
|
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
|
217
|
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
|
218
|
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
|
219
|
+
#.idea/
|
|
220
|
+
|
|
221
|
+
# Abstra
|
|
222
|
+
# Abstra is an AI-powered process automation framework.
|
|
223
|
+
# Ignore directories containing user credentials, local state, and settings.
|
|
224
|
+
# Learn more at https://abstra.io/docs
|
|
225
|
+
.abstra/
|
|
226
|
+
|
|
227
|
+
# Visual Studio Code
|
|
228
|
+
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
|
229
|
+
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
|
230
|
+
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
|
231
|
+
# you could uncomment the following to ignore the entire vscode folder
|
|
232
|
+
# .vscode/
|
|
233
|
+
|
|
234
|
+
# Ruff stuff:
|
|
235
|
+
.ruff_cache/
|
|
236
|
+
|
|
237
|
+
# PyPI configuration file
|
|
238
|
+
.pypirc
|
|
239
|
+
|
|
240
|
+
# Cursor
|
|
241
|
+
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
|
|
242
|
+
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
|
|
243
|
+
# refer to https://docs.cursor.com/context/ignore-files
|
|
244
|
+
.cursorignore
|
|
245
|
+
.cursorindexingignore
|
|
246
|
+
|
|
247
|
+
# Marimo
|
|
248
|
+
marimo/_static/
|
|
249
|
+
marimo/_lsp/
|
|
250
|
+
__marimo__/
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: intercept-agent
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: Intercept Developer Posture Agent - collects developer environment data
|
|
5
|
+
Requires-Python: >=3.12
|
|
6
|
+
Requires-Dist: click>=8.1.0
|
|
7
|
+
Requires-Dist: httpx>=0.28.0
|
|
8
|
+
Requires-Dist: pydantic>=2.10.0
|
|
9
|
+
Requires-Dist: pyyaml>=6.0.0
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
0.2.0
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
app:
|
|
2
|
+
name: "Intercept Posture Agent"
|
|
3
|
+
debug: true
|
|
4
|
+
log_level: "INFO"
|
|
5
|
+
|
|
6
|
+
api:
|
|
7
|
+
url: "http://api:8000"
|
|
8
|
+
timeout: 30
|
|
9
|
+
retries: 3
|
|
10
|
+
|
|
11
|
+
collectors:
|
|
12
|
+
machine: true
|
|
13
|
+
ides: true
|
|
14
|
+
extensions: true
|
|
15
|
+
ai_tools: true
|
|
16
|
+
dev_tools: true
|
|
17
|
+
security: true
|
|
18
|
+
package_managers: true
|
|
19
|
+
|
|
20
|
+
schedule:
|
|
21
|
+
interval_seconds: 3600
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
app:
|
|
2
|
+
name: "Intercept Posture Agent"
|
|
3
|
+
debug: true
|
|
4
|
+
log_level: "DEBUG"
|
|
5
|
+
|
|
6
|
+
api:
|
|
7
|
+
url: "http://localhost:8000"
|
|
8
|
+
timeout: 30
|
|
9
|
+
retries: 3
|
|
10
|
+
|
|
11
|
+
collectors:
|
|
12
|
+
machine: true
|
|
13
|
+
ides: true
|
|
14
|
+
extensions: true
|
|
15
|
+
ai_tools: true
|
|
16
|
+
dev_tools: true
|
|
17
|
+
security: true
|
|
18
|
+
package_managers: true
|
|
19
|
+
|
|
20
|
+
schedule:
|
|
21
|
+
interval_seconds: 3600
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
app:
|
|
2
|
+
name: "Intercept Posture Agent"
|
|
3
|
+
debug: false
|
|
4
|
+
log_level: "WARNING"
|
|
5
|
+
|
|
6
|
+
api:
|
|
7
|
+
url: "http://api:8000"
|
|
8
|
+
timeout: 30
|
|
9
|
+
retries: 3
|
|
10
|
+
|
|
11
|
+
collectors:
|
|
12
|
+
machine: true
|
|
13
|
+
ides: true
|
|
14
|
+
extensions: true
|
|
15
|
+
ai_tools: true
|
|
16
|
+
dev_tools: true
|
|
17
|
+
security: true
|
|
18
|
+
package_managers: true
|
|
19
|
+
|
|
20
|
+
schedule:
|
|
21
|
+
interval_seconds: 3600
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
app:
|
|
2
|
+
name: "Intercept Posture Agent"
|
|
3
|
+
debug: false
|
|
4
|
+
log_level: "INFO"
|
|
5
|
+
|
|
6
|
+
api:
|
|
7
|
+
url: "hhttps://intercept.test.hijacksecurity.com"
|
|
8
|
+
timeout: 10
|
|
9
|
+
retries: 1
|
|
10
|
+
|
|
11
|
+
collectors:
|
|
12
|
+
machine: true
|
|
13
|
+
ides: true
|
|
14
|
+
extensions: true
|
|
15
|
+
ai_tools: true
|
|
16
|
+
dev_tools: true
|
|
17
|
+
security: true
|
|
18
|
+
package_managers: true
|
|
19
|
+
|
|
20
|
+
schedule:
|
|
21
|
+
interval_seconds: 3600
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""AI tools collector."""
|
|
2
|
+
|
|
3
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
4
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
# Known AI extension IDs across editors
|
|
8
|
+
AI_EXTENSIONS = {
|
|
9
|
+
"github.copilot",
|
|
10
|
+
"github.copilot-chat",
|
|
11
|
+
"sourcegraph.cody-ai",
|
|
12
|
+
"continue.continue",
|
|
13
|
+
"amazonwebservices.amazon-q-vscode",
|
|
14
|
+
"saoudrizwan.claude-dev",
|
|
15
|
+
"cursor.cursor-ai",
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
# CLI-based AI tools: (name, binary, version_flag)
|
|
19
|
+
AI_CLI_TOOLS = [
|
|
20
|
+
("Claude Code", "claude", "--version"),
|
|
21
|
+
("GitHub Copilot CLI", "github-copilot-cli", "--version"),
|
|
22
|
+
("Aider", "aider", "--version"),
|
|
23
|
+
("Open Interpreter", "interpreter", "--version"),
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class AIToolsCollector(BaseCollector):
|
|
28
|
+
"""Collects AI coding tool information."""
|
|
29
|
+
|
|
30
|
+
name = "ai_tools"
|
|
31
|
+
|
|
32
|
+
async def collect(self) -> CollectorResult:
|
|
33
|
+
errors: list[str] = []
|
|
34
|
+
ai_tools: list[dict[str, str]] = []
|
|
35
|
+
|
|
36
|
+
# Check CLI tools
|
|
37
|
+
for name, binary, version_flag in AI_CLI_TOOLS:
|
|
38
|
+
try:
|
|
39
|
+
which_result = await run_command("which", binary)
|
|
40
|
+
if which_result and which_result.strip():
|
|
41
|
+
version = await check_version(binary, version_flag) or ""
|
|
42
|
+
ai_tools.append({
|
|
43
|
+
"name": name,
|
|
44
|
+
"type": "cli",
|
|
45
|
+
"binary": binary,
|
|
46
|
+
"version": version,
|
|
47
|
+
})
|
|
48
|
+
except Exception as e:
|
|
49
|
+
errors.append(f"{name}: {e}")
|
|
50
|
+
|
|
51
|
+
# Check VS Code/Cursor extensions for AI tools
|
|
52
|
+
for binary in ("code", "cursor"):
|
|
53
|
+
try:
|
|
54
|
+
which_result = await run_command("which", binary)
|
|
55
|
+
if not which_result or not which_result.strip():
|
|
56
|
+
continue
|
|
57
|
+
|
|
58
|
+
result = await run_command(binary, "--list-extensions")
|
|
59
|
+
if not result:
|
|
60
|
+
continue
|
|
61
|
+
|
|
62
|
+
installed_exts = {ext.strip().lower() for ext in result.strip().split("\n") if ext.strip()}
|
|
63
|
+
for ai_ext_id in AI_EXTENSIONS:
|
|
64
|
+
if ai_ext_id.lower() in installed_exts:
|
|
65
|
+
editor = "VS Code" if binary == "code" else "Cursor"
|
|
66
|
+
ai_tools.append({
|
|
67
|
+
"name": ai_ext_id,
|
|
68
|
+
"type": "extension",
|
|
69
|
+
"editor": editor,
|
|
70
|
+
"version": "",
|
|
71
|
+
})
|
|
72
|
+
except Exception as e:
|
|
73
|
+
errors.append(f"AI extensions ({binary}): {e}")
|
|
74
|
+
|
|
75
|
+
return CollectorResult(
|
|
76
|
+
collector=self.name,
|
|
77
|
+
data=ai_tools,
|
|
78
|
+
errors=errors,
|
|
79
|
+
)
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Base collector interface."""
|
|
2
|
+
|
|
3
|
+
from abc import ABC, abstractmethod
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class CollectorResult(BaseModel):
|
|
10
|
+
"""Result from a collector."""
|
|
11
|
+
collector: str
|
|
12
|
+
data: Any
|
|
13
|
+
errors: list[str] = []
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class BaseCollector(ABC):
|
|
17
|
+
"""Base class for all collectors."""
|
|
18
|
+
|
|
19
|
+
name: str = "base"
|
|
20
|
+
|
|
21
|
+
@abstractmethod
|
|
22
|
+
async def collect(self) -> CollectorResult:
|
|
23
|
+
"""Collect data and return a result."""
|
|
24
|
+
...
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Developer tools collector."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
6
|
+
from posture_agent.utils.shell import check_version, run_command
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# Tool definitions: (name, binary, version_flag)
|
|
10
|
+
DEV_TOOL_DEFINITIONS = [
|
|
11
|
+
("Git", "git", "--version"),
|
|
12
|
+
("Docker", "docker", "--version"),
|
|
13
|
+
("Node.js", "node", "--version"),
|
|
14
|
+
("Python", "python3", "--version"),
|
|
15
|
+
("Go", "go", "version"),
|
|
16
|
+
("Rust", "rustc", "--version"),
|
|
17
|
+
("Ruby", "ruby", "--version"),
|
|
18
|
+
("Java", "java", "--version"),
|
|
19
|
+
("Swift", "swift", "--version"),
|
|
20
|
+
("Make", "make", "--version"),
|
|
21
|
+
("CMake", "cmake", "--version"),
|
|
22
|
+
("Terraform", "terraform", "--version"),
|
|
23
|
+
("kubectl", "kubectl", "version --client"),
|
|
24
|
+
("Helm", "helm", "version --short"),
|
|
25
|
+
("AWS CLI", "aws", "--version"),
|
|
26
|
+
("gcloud", "gcloud", "--version"),
|
|
27
|
+
("Azure CLI", "az", "--version"),
|
|
28
|
+
]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class DevToolsCollector(BaseCollector):
|
|
32
|
+
"""Collects installed developer tools."""
|
|
33
|
+
|
|
34
|
+
name = "dev_tools"
|
|
35
|
+
|
|
36
|
+
async def collect(self) -> CollectorResult:
|
|
37
|
+
errors: list[str] = []
|
|
38
|
+
tools: list[dict[str, str]] = []
|
|
39
|
+
|
|
40
|
+
for name, binary, version_flag in DEV_TOOL_DEFINITIONS:
|
|
41
|
+
try:
|
|
42
|
+
which_result = await run_command("which", binary)
|
|
43
|
+
if which_result and which_result.strip():
|
|
44
|
+
version = await check_version(binary, version_flag) or ""
|
|
45
|
+
path = os.path.realpath(which_result.strip())
|
|
46
|
+
tools.append({
|
|
47
|
+
"name": name,
|
|
48
|
+
"binary": binary,
|
|
49
|
+
"version": version,
|
|
50
|
+
"path": path,
|
|
51
|
+
})
|
|
52
|
+
except Exception as e:
|
|
53
|
+
errors.append(f"{name}: {e}")
|
|
54
|
+
|
|
55
|
+
return CollectorResult(
|
|
56
|
+
collector=self.name,
|
|
57
|
+
data=tools,
|
|
58
|
+
errors=errors,
|
|
59
|
+
)
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
"""IDE extension collector."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
import zipfile
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from posture_agent.collectors.base import BaseCollector, CollectorResult
|
|
9
|
+
from posture_agent.utils.shell import run_command
|
|
10
|
+
|
|
11
|
+
# Map binary name to extensions directory
|
|
12
|
+
_EXT_DIRS = {
|
|
13
|
+
"code": Path.home() / ".vscode" / "extensions",
|
|
14
|
+
"cursor": Path.home() / ".cursor" / "extensions",
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ExtensionCollector(BaseCollector):
    """Collects IDE extension information.

    Covers VS Code and Cursor (via their CLIs, with a filesystem fallback)
    and JetBrains IDEs (filesystem only; assumes the macOS Application
    Support layout — TODO confirm other platforms are out of scope).
    """

    name = "extensions"

    async def collect(self) -> CollectorResult:
        """Gather extensions per editor; failures are recorded, not raised."""
        errors: list[str] = []
        extensions: dict[str, list[dict[str, str]]] = {}

        # VS Code extensions
        try:
            vscode_exts = await self._collect_vscode_extensions("code")
            if vscode_exts:
                extensions["vscode"] = vscode_exts
        except Exception as e:
            errors.append(f"VS Code extensions: {e}")

        # Cursor extensions
        try:
            cursor_exts = await self._collect_vscode_extensions("cursor")
            if cursor_exts:
                extensions["cursor"] = cursor_exts
        except Exception as e:
            errors.append(f"Cursor extensions: {e}")

        # JetBrains plugins
        try:
            jetbrains_plugins = await self._collect_jetbrains_plugins()
            if jetbrains_plugins:
                extensions["jetbrains"] = jetbrains_plugins
        except Exception as e:
            errors.append(f"JetBrains plugins: {e}")

        return CollectorResult(
            collector=self.name,
            data=extensions,
            errors=errors,
        )

    @staticmethod
    def _version_key(version: str) -> tuple[tuple[int, str], ...]:
        """Sort key ordering dotted version strings numerically where possible.

        Plain string comparison ranks "9.0.0" above "10.0.0"; this key
        compares the leading integer of each dot-separated piece first,
        then the raw piece text for non-numeric suffixes. Pieces with no
        leading digits (and the empty string) sort below any number.
        """
        key: list[tuple[int, str]] = []
        for piece in version.split("."):
            match = re.match(r"\d+", piece)
            key.append((int(match.group()) if match else -1, piece))
        return tuple(key)

    async def _collect_vscode_extensions(self, binary: str) -> list[dict[str, str]]:
        """Collect extensions for VS Code or Cursor via CLI or filesystem."""
        # Try the editor CLI first: authoritative list including versions.
        which_result = await run_command("which", binary)
        if which_result and which_result.strip():
            result = await run_command(binary, "--list-extensions", "--show-versions")
            if result:
                exts = []
                for line in result.strip().split("\n"):
                    line = line.strip()
                    if not line:
                        continue
                    # Lines look like "publisher.name@1.2.3"; rsplit tolerates
                    # "@" inside the ID and a missing version suffix.
                    if "@" in line:
                        ext_id, version = line.rsplit("@", 1)
                        exts.append({"id": ext_id, "version": version})
                    else:
                        exts.append({"id": line, "version": ""})
                return exts

        # Fall back to reading the extensions directory on disk.
        ext_dir = _EXT_DIRS.get(binary)
        if not ext_dir or not ext_dir.exists():
            return []

        exts = []
        for entry in ext_dir.iterdir():
            if not entry.is_dir():
                continue
            pkg_json = entry / "package.json"
            if not pkg_json.exists():
                continue
            try:
                data = json.loads(pkg_json.read_text(encoding="utf-8", errors="replace"))
            except (json.JSONDecodeError, OSError):
                continue
            publisher = data.get("publisher", "")
            name = data.get("name", "")
            if publisher and name:
                exts.append({"id": f"{publisher}.{name}", "version": data.get("version", "")})

        # Deduplicate (multiple versions may be installed on disk), keeping the
        # highest version. BUGFIX: compare with a numeric-aware key instead of
        # raw string comparison, which ranked "9.0.0" above "10.0.0".
        seen: dict[str, str] = {}
        for ext in exts:
            ext_id = ext["id"].lower()
            if ext_id not in seen or self._version_key(ext["version"]) > self._version_key(seen[ext_id]):
                seen[ext_id] = ext["version"]
        return [{"id": ext_id, "version": ver} for ext_id, ver in sorted(seen.items())]

    async def _collect_jetbrains_plugins(self) -> list[dict[str, str]]:
        """Collect JetBrains IDE plugins from the macOS Application Support tree."""
        plugins: list[dict[str, str]] = []
        jetbrains_dir = Path.home() / "Library" / "Application Support" / "JetBrains"
        if not jetbrains_dir.exists():
            return plugins

        # One subdirectory per installed IDE/version; plugins live underneath.
        for ide_dir in jetbrains_dir.iterdir():
            if not ide_dir.is_dir():
                continue
            plugins_dir = ide_dir / "plugins"
            if plugins_dir.exists():
                for plugin_dir in plugins_dir.iterdir():
                    if plugin_dir.is_dir():
                        version = self._get_jetbrains_plugin_version(plugin_dir)
                        plugins.append({
                            "id": plugin_dir.name,
                            "ide": ide_dir.name,
                            "version": version,
                        })
        return plugins

    def _get_jetbrains_plugin_version(self, plugin_dir: Path) -> str:
        """Extract version from a JetBrains plugin's JAR META-INF/plugin.xml.

        Scans each JAR in the plugin's lib/ directory; returns the first
        <version> tag found, or "" when none is readable.
        """
        lib_dir = plugin_dir / "lib"
        if not lib_dir.exists():
            return ""

        for jar_path in lib_dir.glob("*.jar"):
            try:
                with zipfile.ZipFile(jar_path) as zf:
                    if "META-INF/plugin.xml" not in zf.namelist():
                        continue
                    plugin_xml = zf.read("META-INF/plugin.xml").decode("utf-8", errors="replace")
                    match = re.search(r"<version>([^<]+)</version>", plugin_xml)
                    if match:
                        return match.group(1)
            except (zipfile.BadZipFile, OSError, KeyError):
                continue
        return ""
|