@jaguilar87/gaia-ops 2.2.1 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/CHANGELOG.md +74 -0
  2. package/config/embeddings_info.json +14 -0
  3. package/config/intent_embeddings.json +2002 -0
  4. package/config/intent_embeddings.npy +0 -0
  5. package/package.json +2 -1
  6. package/templates/CLAUDE.template.md +3 -11
  7. package/tests/README.en.md +224 -0
  8. package/tests/README.md +338 -0
  9. package/tests/fixtures/project-context.aws.json +53 -0
  10. package/tests/fixtures/project-context.gcp.json +53 -0
  11. package/tests/integration/RUN_TESTS.md +185 -0
  12. package/tests/integration/__init__.py +0 -0
  13. package/tests/integration/test_hooks_integration.py +473 -0
  14. package/tests/integration/test_hooks_workflow.py +397 -0
  15. package/tests/permissions-validation/MANUAL_VALIDATION.md +434 -0
  16. package/tests/permissions-validation/test_permissions_validation.py +527 -0
  17. package/tests/system/__init__.py +0 -0
  18. package/tests/system/permissions_helpers.py +318 -0
  19. package/tests/system/test_agent_definitions.py +166 -0
  20. package/tests/system/test_configuration_files.py +121 -0
  21. package/tests/system/test_directory_structure.py +231 -0
  22. package/tests/system/test_permissions_system.py +1006 -0
  23. package/tests/tools/__init__.py +0 -0
  24. package/tests/tools/test_agent_router.py +266 -0
  25. package/tests/tools/test_clarify_engine.py +413 -0
  26. package/tests/tools/test_context_provider.py +157 -0
  27. package/tests/validators/__init__.py +0 -0
  28. package/tests/validators/test_approval_gate.py +415 -0
  29. package/tests/validators/test_commit_validator.py +446 -0
  30. package/tools/context_provider.py +4 -4
  31. package/tools/generate_embeddings.py +3 -3
  32. package/tools/semantic_matcher.py +2 -2
@@ -0,0 +1,318 @@
1
+ """
2
+ Helper utilities for permissions system tests.
3
+
4
+ Provides functions for:
5
+ - Loading project and shared settings
6
+ - Merging settings with proper precedence
7
+ - Finding Claude configuration directories
8
+ - Detecting environment mode (production vs development)
9
+ """
10
+
11
+ import json
12
+ from pathlib import Path
13
+ from typing import Dict, Any, Optional, List
14
+ import re
15
+
16
+
17
def load_project_settings(project_root: Path) -> Optional[Dict[str, Any]]:
    """
    Load project-specific settings.json.

    Looks for ``<project_root>/.claude/settings.json``.

    Args:
        project_root: Root directory of the project

    Returns:
        Dict of settings, or None if the file is missing, unreadable,
        or not valid JSON (errors are reported on stdout)
    """
    settings_path = project_root / ".claude" / "settings.json"

    if not settings_path.exists():
        return None

    try:
        # Explicit encoding: JSON files are UTF-8 by convention; the bare
        # open() default would use the platform locale encoding instead.
        with open(settings_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError) as e:
        # OSError covers IOError (an alias since Python 3.3).
        print(f"Error loading project settings: {e}")
        return None
38
+
39
+
40
def load_shared_settings(shared_root: Path) -> Optional[Dict[str, Any]]:
    """
    Load shared settings.json from .claude-shared.

    Looks for ``<shared_root>/.claude/settings.json``.

    Args:
        shared_root: Root directory of .claude-shared

    Returns:
        Dict of settings, or None if the file is missing, unreadable,
        or not valid JSON (errors are reported on stdout)
    """
    settings_path = shared_root / ".claude" / "settings.json"

    if not settings_path.exists():
        return None

    try:
        # Explicit encoding: JSON files are UTF-8 by convention; the bare
        # open() default would use the platform locale encoding instead.
        with open(settings_path, 'r', encoding='utf-8') as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError) as e:
        # OSError covers IOError (an alias since Python 3.3).
        print(f"Error loading shared settings: {e}")
        return None
61
+
62
+
63
def merge_settings(project: Dict[str, Any], shared: Dict[str, Any]) -> Dict[str, Any]:
    """
    Merge project and shared settings with proper precedence.

    Rules:
    - Project settings override shared settings for the same key
    - Nested dicts are merged recursively
    - Lists (and all other non-dict values) are replaced, not merged

    Args:
        project: Project-specific settings (higher precedence)
        shared: Shared settings (lower precedence)

    Returns:
        New merged settings dict (inputs are not mutated; top level is a
        shallow copy)
    """
    # Degenerate cases: one side empty or None.
    if not project:
        return shared.copy() if shared else {}
    if not shared:
        return project.copy()

    merged = dict(shared)
    for key, override in project.items():
        base = merged.get(key)
        if isinstance(base, dict) and isinstance(override, dict):
            # Both sides hold dicts for this key: descend recursively,
            # keeping project-side precedence.
            merged[key] = merge_settings(override, base)
        else:
            # Project value wins outright (including lists).
            merged[key] = override
    return merged
98
+
99
+
100
def find_claude_config(project_root: Path) -> Optional[Path]:
    """
    Find the .claude configuration directory.

    Checks the project root first, then its parent and grandparent.

    Args:
        project_root: Root directory to search from

    Returns:
        Path to the nearest .claude directory, or None if not found
    """
    # Candidate roots in precedence order: project itself, parent, grandparent.
    candidates = (project_root, project_root.parent, project_root.parent.parent)
    for root in candidates:
        claude_dir = root / ".claude"
        # is_dir() is False for missing paths, so no separate exists() needed.
        if claude_dir.is_dir():
            return claude_dir
    return None
122
+
123
+
124
def get_environment_mode(project_root: Path) -> str:
    """
    Detect environment mode (production vs development).

    Sources are consulted in order, first match wins:
    1. CLAUDE_ENV environment variable
    2. The project path itself (substring match)
    3. The "environment" field of the project's settings.json

    Args:
        project_root: Root directory of the project

    Returns:
        "production" or "development" (defaults to "development")
    """
    import os

    def classify(text: str) -> Optional[str]:
        # "prod" is tested before "dev", matching the original check order
        # (so a string containing both classifies as production).
        if "prod" in text:
            return "production"
        if "dev" in text:
            return "development"
        return None

    # 1. Environment variable.
    mode = classify(os.getenv("CLAUDE_ENV", "").lower())
    if mode:
        return mode

    # 2. Project path.
    mode = classify(str(project_root).lower())
    if mode:
        return mode

    # 3. settings.json "environment" field.
    settings = load_project_settings(project_root)
    if settings and "environment" in settings:
        mode = classify(settings["environment"].lower())
        if mode:
            return mode

    # Default to development (safer).
    return "development"
167
+
168
+
169
def load_merged_settings(project_root: Path, shared_root: Path) -> Dict[str, Any]:
    """
    Load and merge project + shared settings in one call.

    Missing files contribute an empty dict; project settings take
    precedence over shared settings (see merge_settings).

    Args:
        project_root: Root directory of the project
        shared_root: Root directory of .claude-shared

    Returns:
        Merged settings dict
    """
    return merge_settings(
        load_project_settings(project_root) or {},
        load_shared_settings(shared_root) or {},
    )
184
+
185
+
186
+
187
+
188
def matches_any_pattern(command: str, patterns: List[str]) -> bool:
    """
    Check if command matches any pattern in the list.

    Supports:
    - Simple substring matching
    - Wildcard patterns with * (glob-style; all other characters literal)
    - Regex patterns (patterns without '*' that fail substring matching)

    Args:
        command: Command to check
        patterns: List of patterns to match against

    Returns:
        True if command matches any pattern
    """
    for pattern in patterns:
        # Simple substring match
        if pattern in command:
            return True

        # Wildcard pattern: '*' means "any run of characters".
        if '*' in pattern:
            # BUG FIX: previously only '*' was translated ('.*') and the rest
            # of the pattern was passed through as live regex, so a glob like
            # "a.c*" matched "abc". Escape every literal segment so only '*'
            # acts as a wildcard.
            regex_pattern = '.*'.join(re.escape(part) for part in pattern.split('*'))
            if re.search(regex_pattern, command, re.IGNORECASE):
                return True
            # A glob pattern must not additionally be re-interpreted as a
            # raw regex below (that double interpretation caused spurious
            # matches, e.g. "a.c*" as regex matching "ab").
            continue

        # Try as regex pattern
        try:
            if re.search(pattern, command, re.IGNORECASE):
                return True
        except re.error:
            # Not a valid regex, skip
            pass

    return False
224
+
225
+
226
def get_permission_decision(command: str, tool: str, settings: Dict[str, Any]) -> str:
    """
    Get permission decision for a command using priority rules.

    Priority: deny > ask > allow > default_deny

    Args:
        command: Command to check
        tool: Tool name (e.g., "bash")
        settings: Merged settings dict

    Returns:
        "deny", "ask", "allow", or "default_deny"
    """
    # Without an explicit rule set for this tool, fall back to default-deny.
    if "permissions" not in settings or tool not in settings["permissions"]:
        return "default_deny"

    tool_permissions = settings["permissions"][tool]

    # Evaluate rule groups in strict priority order. "ask" is stored as a
    # dict, so only its keys are treated as patterns.
    rule_groups = (
        ("deny", tool_permissions.get("deny", [])),
        ("ask", list(tool_permissions.get("ask", {}).keys())),
        ("allow", tool_permissions.get("allow", [])),
    )
    for decision, patterns in rule_groups:
        if matches_any_pattern(command, patterns):
            return decision

    # Not matched by any group: deny unless explicitly allowed.
    return "default_deny"
265
+
266
+
267
def get_permission_level(settings: Dict[str, Any], tool: str, command: str) -> str:
    """
    Determine permission level for a command.

    Thin compatibility wrapper around get_permission_decision() with a
    (settings, tool, command) argument order.

    Priority: deny > ask > allow > default_deny

    Args:
        settings: Merged settings dict
        tool: Tool name (e.g., "bash")
        command: Command to check

    Returns:
        "deny", "ask", "allow", or "default_deny"
    """
    decision = get_permission_decision(command, tool, settings)
    return decision
282
+
283
+
284
def validate_settings_schema(settings: Dict[str, Any]) -> bool:
    """
    Validate that settings dict has correct structure.

    A settings dict is valid when it is a dict and, if a "permissions"
    section exists, that section maps tool names to dicts whose optional
    "deny"/"allow" entries are lists and optional "ask" entry is a dict.

    Args:
        settings: Settings dict to validate

    Returns:
        True if valid, False otherwise
    """
    if not isinstance(settings, dict):
        return False

    # A settings dict without a permissions section is trivially valid.
    if "permissions" not in settings:
        return True

    permissions = settings["permissions"]
    if not isinstance(permissions, dict):
        return False

    for perms in permissions.values():
        # Each tool entry must itself be a dict of rule groups.
        if not isinstance(perms, dict):
            return False
        # deny/allow, when present, must be lists of patterns.
        for list_key in ("deny", "allow"):
            if list_key in perms and not isinstance(perms[list_key], list):
                return False
        # ask, when present, must be a dict (pattern -> metadata).
        if "ask" in perms and not isinstance(perms["ask"], dict):
            return False

    return True
@@ -0,0 +1,166 @@
1
+ """
2
+ Test suite for agent definition files
3
+ Validates agent prompts have required sections and structure
4
+ """
5
+
6
+ import pytest
7
+ from pathlib import Path
8
+
9
+
10
class TestAgentStructure:
    """Test that agent files have proper structure"""

    @pytest.fixture
    def agents_dir(self):
        """Get the agents directory path"""
        agents = Path(__file__).resolve().parents[2] / "agents"
        return agents.resolve() if agents.is_symlink() else agents

    @pytest.fixture
    def all_agents(self, agents_dir):
        """Get all agent markdown files"""
        return list(agents_dir.glob("*.md"))

    def test_all_agents_have_title(self, all_agents):
        """All agents should have a title heading"""
        for agent_file in all_agents:
            # Any markdown heading level (#, ##, ...) counts as a title.
            heading_lines = [
                line for line in agent_file.read_text().split('\n')
                if line.strip().startswith('#')
            ]
            assert heading_lines, \
                f"{agent_file.name} should have at least one heading"

    def test_all_agents_have_role_section(self, all_agents):
        """All agents should define their role"""
        role_indicators = [
            "## Role", "## Primary Role", "## Core Identity",
            "role", "responsibility", "specialize"
        ]
        for agent_file in all_agents:
            # Case-insensitive scan for any role-related marker.
            text = agent_file.read_text().lower()
            has_role = any(ind.lower() in text for ind in role_indicators)
            assert has_role, f"{agent_file.name} missing role definition"

    def test_all_agents_have_capabilities(self, all_agents):
        """All agents should list their capabilities"""
        capability_indicators = [
            "## Capabilities",
            "## Core Capabilities",
            "## What I Do",
            "## Responsibilities",
            "capabilities",
            "can do",
            "will handle"
        ]
        for agent_file in all_agents:
            text = agent_file.read_text().lower()
            has_capabilities = any(ind.lower() in text for ind in capability_indicators)
            assert has_capabilities, \
                f"{agent_file.name} missing capabilities section"

    def test_all_agents_have_workflow(self, all_agents):
        """All agents should document their workflow"""
        workflow_indicators = [
            "## Workflow",
            "## Operating Protocol",
            "## Process",
            "## Execution Protocol",
            "workflow",
            "protocol",
            "procedure",
            "steps"
        ]
        for agent_file in all_agents:
            text = agent_file.read_text().lower()
            has_workflow = any(ind.lower() in text for ind in workflow_indicators)
            assert has_workflow, f"{agent_file.name} missing workflow description"
78
+
79
+
80
class TestProjectAgents:
    """Test project agent specific requirements"""

    @pytest.fixture
    def agents_dir(self):
        """Get the agents directory path"""
        agents = Path(__file__).resolve().parents[2] / "agents"
        return agents.resolve() if agents.is_symlink() else agents

    def test_terraform_architect_exists(self, agents_dir):
        """terraform-architect.md must exist"""
        assert (agents_dir / "terraform-architect.md").exists(), \
            "terraform-architect.md not found"

    def test_gitops_operator_exists(self, agents_dir):
        """gitops-operator.md must exist"""
        assert (agents_dir / "gitops-operator.md").exists(), \
            "gitops-operator.md not found"

    def test_gcp_troubleshooter_exists(self, agents_dir):
        """gcp-troubleshooter.md must exist"""
        assert (agents_dir / "gcp-troubleshooter.md").exists(), \
            "gcp-troubleshooter.md not found"

    def test_aws_troubleshooter_exists(self, agents_dir):
        """aws-troubleshooter.md must exist"""
        assert (agents_dir / "aws-troubleshooter.md").exists(), \
            "aws-troubleshooter.md not found"

    def test_devops_developer_exists(self, agents_dir):
        """devops-developer.md must exist"""
        assert (agents_dir / "devops-developer.md").exists(), \
            "devops-developer.md not found"
113
+
114
+
115
class TestAgentSecurity:
    """Test that agents document security tiers"""

    @pytest.fixture
    def agents_dir(self):
        """Get the agents directory path"""
        agents = Path(__file__).resolve().parents[2] / "agents"
        return agents.resolve() if agents.is_symlink() else agents

    def test_agents_document_security_tiers(self, agents_dir):
        """Agents should document their security tier capabilities"""
        # Tier labels or generic security/permission vocabulary all count.
        tier_indicators = ["T0", "T1", "T2", "T3", "tier", "security", "permission"]
        for agent_file in agents_dir.glob("*.md"):
            text = agent_file.read_text().lower()
            mentions_tiers = any(ind.lower() in text for ind in tier_indicators)
            assert mentions_tiers, \
                f"{agent_file.name} should document security tier usage or permissions"
135
+
136
+
137
class TestAgentConsistency:
    """Test consistency across agent definitions"""

    @pytest.fixture
    def agents_dir(self):
        """Get the agents directory path"""
        agents = Path(__file__).resolve().parents[2] / "agents"
        return agents.resolve() if agents.is_symlink() else agents

    def test_no_duplicate_agent_names(self, agents_dir):
        """Agent names should be unique"""
        agent_names = [f.stem for f in agents_dir.glob("*.md")]
        assert len(agent_names) == len(set(agent_names)), \
            "Duplicate agent names detected"

    def test_agent_naming_convention(self, agents_dir):
        """Agent files should follow naming convention (kebab-case)"""
        for agent_file in agents_dir.glob("*.md"):
            name = agent_file.stem
            # BUG FIX: the previous check (`name.islower() or "-" in name`)
            # accepted mixed-case names as long as they contained a hyphen
            # (e.g. "My-Agent"). Kebab-case requires all cased characters
            # to be lowercase regardless of hyphens.
            assert name == name.lower(), \
                f"{agent_file.name} should use kebab-case or lowercase naming"
            assert " " not in name, \
                f"{agent_file.name} should not contain spaces"
163
+
164
+
165
if __name__ == "__main__":
    # Allow running this test module directly, outside the pytest CLI.
    pytest.main([__file__, "-v"])
@@ -0,0 +1,121 @@
1
+ """
2
+ Test suite for configuration files
3
+ Validates settings.json, git_standards.json, and other configs
4
+ """
5
+
6
+ import pytest
7
+ import json
8
+ from pathlib import Path
9
+
10
+
11
class TestSettingsTemplate:
    """Test templates/settings.template.json structure and validity"""

    @pytest.fixture
    def settings_path(self):
        """Get settings template path (gaia-ops is a package, not installed project)"""
        return Path(__file__).resolve().parents[2] / "templates" / "settings.template.json"

    def test_settings_file_exists(self, settings_path):
        """settings.template.json must exist in templates/"""
        assert settings_path.exists(), f"settings.template.json not found at {settings_path}"

    def test_settings_is_valid_json(self, settings_path):
        """settings.template.json must be valid JSON"""
        try:
            json.loads(settings_path.read_text())
        except json.JSONDecodeError as e:
            pytest.fail(f"settings.template.json is not valid JSON: {e}")

    def test_settings_has_required_sections(self, settings_path):
        """settings.template.json should have required configuration sections"""
        data = json.loads(settings_path.read_text())
        # The template must carry both core sections.
        assert 'hooks' in data, "settings.template.json missing hooks section"
        assert 'permissions' in data, "settings.template.json missing permissions section"
39
+
40
+
41
class TestGitStandards:
    """Test git_standards.json configuration"""

    @pytest.fixture
    def git_standards_path(self):
        """Get git_standards.json path"""
        return Path(__file__).resolve().parents[2] / "config" / "git_standards.json"

    def test_git_standards_exists(self, git_standards_path):
        """git_standards.json must exist"""
        assert git_standards_path.exists(), "git_standards.json not found"

    def test_git_standards_is_valid_json(self, git_standards_path):
        """git_standards.json must be valid JSON"""
        try:
            json.loads(git_standards_path.read_text())
        except json.JSONDecodeError as e:
            pytest.fail(f"git_standards.json is not valid JSON: {e}")

    def test_git_standards_has_commit_types(self, git_standards_path):
        """git_standards.json should define allowed commit types"""
        data = json.loads(git_standards_path.read_text())
        # Canonical layout: data['commit_message']['type_allowed'];
        # top-level 'commit_types' / 'allowed_types' are accepted too.
        nested = 'commit_message' in data and 'type_allowed' in data['commit_message']
        has_types = nested or 'commit_types' in data or 'allowed_types' in data
        assert has_types, "git_standards.json missing commit types"

    def test_git_standards_has_forbidden_footers(self, git_standards_path):
        """git_standards.json should define forbidden footers"""
        data = json.loads(git_standards_path.read_text())
        # Canonical layout: data['commit_message']['footer_forbidden'];
        # several legacy top-level key names are accepted too.
        nested = 'commit_message' in data and 'footer_forbidden' in data['commit_message']
        has_forbidden = (
            nested or
            'forbidden_footers' in data or
            'blocked_footers' in data or
            'prohibited_footers' in data
        )
        assert has_forbidden, \
            "git_standards.json missing forbidden footers config"
88
+
89
+
90
class TestConfigConsistency:
    """Test consistency across configuration files"""

    @pytest.fixture
    def config_dir(self):
        """Get config directory path"""
        return Path(__file__).resolve().parents[2] / "config"

    def test_all_json_files_valid(self, config_dir):
        """All JSON files in config/ should be valid"""
        if not config_dir.exists():
            pytest.skip("config/ directory not found")

        for json_file in config_dir.glob("*.json"):
            try:
                json.loads(json_file.read_text())
            except json.JSONDecodeError as e:
                pytest.fail(f"Invalid JSON in {json_file.name}: {e}")

    def test_no_empty_config_files(self, config_dir):
        """Config files should not be empty"""
        if not config_dir.exists():
            pytest.skip("config/ directory not found")

        for config_file in config_dir.glob("*.json"):
            # 10 bytes is below even the smallest meaningful JSON config.
            assert config_file.stat().st_size > 10, \
                f"{config_file.name} is too small or empty"
118
+
119
+
120
if __name__ == "__main__":
    # Allow running this test module directly, outside the pytest CLI.
    pytest.main([__file__, "-v"])