aes-cli 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aes/__init__.py +5 -0
- aes/__main__.py +37 -0
- aes/analyzer.py +487 -0
- aes/commands/__init__.py +0 -0
- aes/commands/init.py +727 -0
- aes/commands/inspect.py +204 -0
- aes/commands/install.py +379 -0
- aes/commands/publish.py +432 -0
- aes/commands/search.py +65 -0
- aes/commands/status.py +153 -0
- aes/commands/sync.py +413 -0
- aes/commands/validate.py +77 -0
- aes/config.py +43 -0
- aes/domains.py +1382 -0
- aes/frameworks.py +522 -0
- aes/mcp_server.py +213 -0
- aes/registry.py +294 -0
- aes/scaffold/agent.yaml.jinja +135 -0
- aes/scaffold/agentignore.jinja +61 -0
- aes/scaffold/instructions.md.jinja +311 -0
- aes/scaffold/local.example.yaml.jinja +35 -0
- aes/scaffold/local.yaml.jinja +29 -0
- aes/scaffold/operations.md.jinja +33 -0
- aes/scaffold/orchestrator.md.jinja +95 -0
- aes/scaffold/permissions.yaml.jinja +151 -0
- aes/scaffold/setup.md.jinja +244 -0
- aes/scaffold/skill.md.jinja +27 -0
- aes/scaffold/skill.yaml.jinja +175 -0
- aes/scaffold/workflow.yaml.jinja +44 -0
- aes/scaffold/workflow_command.md.jinja +48 -0
- aes/schemas/agent.schema.json +188 -0
- aes/schemas/permissions.schema.json +100 -0
- aes/schemas/registry.schema.json +72 -0
- aes/schemas/skill.schema.json +209 -0
- aes/schemas/workflow.schema.json +92 -0
- aes/targets/__init__.py +29 -0
- aes/targets/_base.py +77 -0
- aes/targets/_composer.py +338 -0
- aes/targets/claude.py +153 -0
- aes/targets/copilot.py +48 -0
- aes/targets/cursor.py +46 -0
- aes/targets/windsurf.py +46 -0
- aes/validator.py +394 -0
- aes_cli-0.2.0.dist-info/METADATA +110 -0
- aes_cli-0.2.0.dist-info/RECORD +48 -0
- aes_cli-0.2.0.dist-info/WHEEL +5 -0
- aes_cli-0.2.0.dist-info/entry_points.txt +3 -0
- aes_cli-0.2.0.dist-info/top_level.txt +1 -0
aes/commands/sync.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
1
|
+
"""aes sync \u2014 Generate tool-specific config files from .agent/ directory."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import hashlib
|
|
6
|
+
import json
|
|
7
|
+
import sys
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
import click
|
|
13
|
+
import yaml
|
|
14
|
+
from rich.console import Console
|
|
15
|
+
|
|
16
|
+
from aes.config import (
|
|
17
|
+
AGENT_DIR,
|
|
18
|
+
COMMANDS_DIR,
|
|
19
|
+
LOCAL_FILE,
|
|
20
|
+
MANIFEST_FILE,
|
|
21
|
+
MEMORY_DIR,
|
|
22
|
+
PERMISSIONS_FILE,
|
|
23
|
+
SKILLS_DIR,
|
|
24
|
+
)
|
|
25
|
+
from aes.targets import TARGETS, TARGET_NAMES, AgentContext, SyncPlan
|
|
26
|
+
|
|
27
|
+
# Shared Rich console used for all sync command output.
console = Console()

# Manifest written at the project root recording every file generated by
# `aes sync` (keyed by relative path), so later runs can detect conflicts
# and `--clean` knows what to remove.
SYNC_MANIFEST = ".aes-sync.json"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def run_sync(
    project_root: Path,
    target_names: Optional[List[str]] = None,
    force: bool = False,
    quiet: bool = False,
) -> int:
    """Run sync programmatically. Returns number of files written.

    Used by ``aes init`` to auto-sync after scaffolding. Unlike the CLI
    command this emits no console output and never prompts.
    """
    agent_dir = project_root / AGENT_DIR

    # Nothing to sync without a scaffolded .agent/ directory and manifest.
    if not (agent_dir.exists() and (agent_dir / MANIFEST_FILE).exists()):
        return 0

    context = _load_agent_context(project_root)
    names = target_names if target_names else TARGET_NAMES

    # One plan per selected target adapter.
    plans: List[SyncPlan] = [TARGETS[name]().plan(context, force) for name in names]

    manifest = _load_sync_manifest(project_root)
    count = 0

    for plan in plans:
        for generated in plan.files:
            # Conflicts are skipped silently in programmatic mode.
            if generated.action not in ("create", "update"):
                continue
            destination = project_root / generated.relative_path
            destination.parent.mkdir(parents=True, exist_ok=True)
            destination.write_text(generated.content)
            # Record the content hash so future syncs can tell our files
            # apart from user-edited ones.
            manifest["files"][generated.relative_path] = {
                "target": plan.target_name,
                "sha256": _sha256(generated.content),
            }
            count += 1

    if count:
        manifest["synced_at"] = datetime.now(timezone.utc).isoformat()
        _save_sync_manifest(project_root, manifest)

    return count
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _deep_merge(base: dict, override: dict) -> dict:
|
|
78
|
+
"""Deep-merge *override* into *base*.
|
|
79
|
+
|
|
80
|
+
Lists are extended (not replaced). Scalars from *override* win.
|
|
81
|
+
Returns a new dict — neither input is mutated.
|
|
82
|
+
"""
|
|
83
|
+
merged = dict(base)
|
|
84
|
+
for key, val in override.items():
|
|
85
|
+
if key in merged and isinstance(merged[key], dict) and isinstance(val, dict):
|
|
86
|
+
merged[key] = _deep_merge(merged[key], val)
|
|
87
|
+
elif key in merged and isinstance(merged[key], list) and isinstance(val, list):
|
|
88
|
+
merged[key] = merged[key] + val
|
|
89
|
+
else:
|
|
90
|
+
merged[key] = val
|
|
91
|
+
return merged
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _load_agent_context(project_root: Path) -> AgentContext:
    """Load all .agent/ contents into an AgentContext.

    Reads the agent manifest and every artifact it references —
    instructions, orchestrator, skill runbooks/manifests, permissions,
    commands, project memory and the local overrides file. Missing
    optional files are tolerated and left as ``None`` / empty.

    Fix: local.yaml permissions are no longer silently dropped when no
    shared permissions file exists — they now apply on their own.
    """
    agent_dir = project_root / AGENT_DIR

    # Load manifest (required — callers check for its existence first)
    with open(agent_dir / MANIFEST_FILE) as f:
        manifest = yaml.safe_load(f) or {}

    agent_section = manifest.get("agent", {})

    # Load instructions
    instructions: Optional[str] = None
    instructions_rel = agent_section.get("instructions", "instructions.md")
    instructions_path = agent_dir / instructions_rel
    if instructions_path.exists():
        instructions = instructions_path.read_text()

    # Load orchestrator (only when the manifest points at one)
    orchestrator: Optional[str] = None
    orchestrator_rel = agent_section.get("orchestrator")
    if orchestrator_rel:
        orchestrator_path = agent_dir / orchestrator_rel
        if orchestrator_path.exists():
            orchestrator = orchestrator_path.read_text()

    # Load skill runbooks and metadata in manifest order
    skill_runbooks: Dict[str, str] = {}
    skill_metadata: Dict[str, Dict[str, Any]] = {}
    for skill_ref in manifest.get("skills", []):
        skill_id = skill_ref.get("id", "unknown")
        runbook_rel = skill_ref.get("runbook")
        if runbook_rel:
            runbook_path = agent_dir / runbook_rel
            if runbook_path.exists():
                skill_runbooks[skill_id] = runbook_path.read_text()
        # Load skill manifest for name/description/activation metadata
        manifest_rel = skill_ref.get("manifest")
        if manifest_rel:
            skill_manifest_path = agent_dir / manifest_rel
            if skill_manifest_path.exists():
                with open(skill_manifest_path) as f:
                    skill_data = yaml.safe_load(f) or {}
                skill_metadata[skill_id] = {
                    "name": skill_data.get("name", skill_id),
                    "description": skill_data.get("description", ""),
                    "negative_triggers": skill_data.get("negative_triggers", []),
                    "activation": skill_data.get("activation", "explicit"),
                    "allowed_tools": skill_data.get("allowed_tools"),
                }
        # Fall back to conservative defaults when no manifest was loaded
        if skill_id not in skill_metadata:
            skill_metadata[skill_id] = {
                "name": skill_id,
                "description": "",
                "negative_triggers": [],
                "activation": "explicit",
                "allowed_tools": None,
            }

    # Load permissions
    permissions: Optional[dict] = None
    permissions_rel = agent_section.get("permissions", PERMISSIONS_FILE)
    permissions_path = agent_dir / permissions_rel
    if permissions_path.exists():
        with open(permissions_path) as f:
            permissions = yaml.safe_load(f) or {}

    # Load commands with file content
    commands: List[dict] = []
    for cmd_ref in manifest.get("commands", []):
        cmd_data = dict(cmd_ref)
        cmd_path = agent_dir / cmd_ref["path"]
        if cmd_path.exists():
            cmd_data["content"] = cmd_path.read_text()
        else:
            # Keep a placeholder so targets still render the command entry.
            cmd_data["content"] = (
                f"# Command: /{cmd_ref.get('id', '?')}\n\nCommand file not found.\n"
            )
        commands.append(cmd_data)

    # Load memory/project.md
    memory_project: Optional[str] = None
    memory_path = agent_dir / MEMORY_DIR / "project.md"
    if memory_path.exists():
        memory_project = memory_path.read_text()

    # Load local.yaml and deep-merge permissions
    local_config: Optional[dict] = None
    local_path = agent_dir / LOCAL_FILE
    if local_path.exists():
        with open(local_path) as f:
            local_config = yaml.safe_load(f) or {}
        # Merge local permissions on top of shared permissions. When no
        # shared permissions exist, apply the local ones alone (previously
        # they were silently dropped in that case).
        local_perms = local_config.get("permissions")
        if local_perms:
            permissions = (
                _deep_merge(permissions, local_perms) if permissions else local_perms
            )

    return AgentContext(
        project_root=project_root,
        agent_dir=agent_dir,
        manifest=manifest,
        instructions=instructions,
        orchestrator=orchestrator,
        skill_runbooks=skill_runbooks,
        permissions=permissions,
        commands=commands,
        memory_project=memory_project,
        skill_metadata=skill_metadata,
        local_config=local_config,
    )
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def _load_sync_manifest(project_root: Path) -> dict:
    """Load .aes-sync.json if it exists, else return a fresh empty manifest."""
    manifest_path = project_root / SYNC_MANIFEST
    if not manifest_path.exists():
        # First sync for this project — start with an empty record.
        return {"files": {}, "synced_at": None}
    return json.loads(manifest_path.read_text())
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _save_sync_manifest(project_root: Path, data: dict) -> None:
    """Save .aes-sync.json as pretty-printed JSON with a trailing newline."""
    payload = json.dumps(data, indent=2) + "\n"
    (project_root / SYNC_MANIFEST).write_text(payload)
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def _sha256(content: str) -> str:
|
|
223
|
+
return hashlib.sha256(content.encode()).hexdigest()[:16]
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
@click.command("sync")
@click.argument("path", default=".", type=click.Path(exists=True))
@click.option(
    "--target",
    "-t",
    multiple=True,
    type=click.Choice(TARGET_NAMES, case_sensitive=False),
    help="Target tool(s) to sync. Repeatable. Default: all.",
)
@click.option("--dry-run", is_flag=True, help="Show what would be generated without writing.")
@click.option("--force", is_flag=True, help="Overwrite files not generated by aes sync.")
@click.option("--clean", is_flag=True, help="Remove previously synced files.")
def sync_cmd(
    path: str,
    target: tuple,  # type: ignore[type-arg]
    dry_run: bool,
    force: bool,
    clean: bool,
) -> None:
    """Generate tool-specific config files from .agent/ directory.

    Reads .agent/ and generates configuration files for AI coding tools
    (Claude Code, Cursor, Copilot, Windsurf).

    PATH is the project root directory (default: current directory).
    """
    project_root = Path(path).resolve()
    agent_dir = project_root / AGENT_DIR

    # Both the .agent/ directory and its manifest are required to proceed.
    if not agent_dir.exists():
        console.print(f"[red]Error:[/] No {AGENT_DIR}/ directory found at {project_root}")
        console.print("[dim]Run 'aes init' to create one.[/]")
        raise SystemExit(1)

    if not (agent_dir / MANIFEST_FILE).exists():
        console.print(f"[red]Error:[/] No {MANIFEST_FILE} found in {agent_dir}")
        raise SystemExit(1)

    # Handle --clean (mutually exclusive with generating: returns early)
    if clean:
        _do_clean(project_root, dry_run)
        return

    # Load context
    ctx = _load_agent_context(project_root)

    # Select targets: explicit --target flags win; otherwise prompt on a
    # TTY, and default to all targets when running non-interactively.
    if target:
        selected = list(target)
    elif sys.stdin.isatty():
        selected = _prompt_target_selection()
    else:
        selected = list(TARGET_NAMES)
    console.print(f"[bold]Syncing[/] {project_root}")
    console.print(f"  Targets: {', '.join(selected)}")
    console.print()

    # Generate plans (no files are written during planning)
    all_plans: List[SyncPlan] = []
    for target_name in selected:
        adapter = TARGETS[target_name]()
        sync_plan = adapter.plan(ctx, force)
        all_plans.append(sync_plan)

    # Execute plans
    sync_manifest = _load_sync_manifest(project_root)
    created = 0
    updated = 0
    conflicts = 0

    for sync_plan in all_plans:
        if sync_plan.files:
            console.print(f"[bold cyan]{sync_plan.target_name}[/]")

        for gf in sync_plan.files:
            # Pick the status icon and bump the matching counter; any
            # action other than create/update is treated as a conflict.
            if gf.action == "create":
                icon = "[green]+[/]"
                created += 1
            elif gf.action == "update":
                icon = "[yellow]~[/]"
                updated += 1
            else:
                icon = "[red]![/]"
                conflicts += 1

            console.print(f"  {icon} {gf.relative_path} [dim]({gf.description})[/]")

            # Conflicting files are reported but never written over.
            if gf.action == "conflict":
                console.print(
                    "    [red]Conflict:[/] file exists and was not generated by aes sync."
                )
                console.print("    [dim]Use --force to overwrite.[/]")
                continue

            if gf.action in ("create", "update") and not dry_run:
                full_path = project_root / gf.relative_path
                full_path.parent.mkdir(parents=True, exist_ok=True)
                full_path.write_text(gf.content)
                # Track the file + content hash so later runs can detect
                # user edits and --clean knows what to delete.
                sync_manifest["files"][gf.relative_path] = {
                    "target": sync_plan.target_name,
                    "sha256": _sha256(gf.content),
                }

        for warning in sync_plan.warnings:
            console.print(f"  [yellow]Warning:[/] {warning}")

        if sync_plan.files:
            console.print()

    # Save manifest (only when something was actually written)
    if not dry_run and (created + updated > 0):
        sync_manifest["synced_at"] = datetime.now(timezone.utc).isoformat()
        _save_sync_manifest(project_root, sync_manifest)

    # Summary
    if dry_run:
        console.print("[dim]Dry run — no files written.[/]")
    console.print(
        f"[bold]Summary:[/] {created} created, {updated} updated, {conflicts} conflicts"
    )

    # Non-zero exit so CI and scripts can detect unresolved conflicts.
    if conflicts > 0:
        raise SystemExit(1)
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _do_clean(project_root: Path, dry_run: bool) -> None:
    """Remove all previously synced files recorded in the sync manifest."""
    manifest = _load_sync_manifest(project_root)
    tracked = manifest.get("files", {})

    if not tracked:
        console.print("[dim]No synced files found.[/]")
        return

    deleted = 0
    for rel_path in list(tracked):
        target_path = project_root / rel_path
        if not target_path.exists():
            # Already removed out-of-band; just report it.
            console.print(f"  [dim]-[/] {rel_path} [dim](already gone)[/]")
            continue
        console.print(f"  [red]-[/] {rel_path}")
        if not dry_run:
            target_path.unlink()
            deleted += 1

    if not dry_run:
        # Reset the manifest so a later --clean is a no-op.
        _save_sync_manifest(project_root, {"files": {}, "synced_at": None})

    if dry_run:
        console.print(f"[dim]Dry run — would remove {len(tracked)} file(s).[/]")
    else:
        console.print(f"[green]Cleaned {deleted} file(s).[/]")
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
def _prompt_target_selection() -> List[str]:
    """Interactively prompt the user to select sync target(s)."""
    console.print("[bold]Select target(s) to sync:[/]\n")
    for num, target_name in enumerate(TARGET_NAMES, 1):
        console.print(f"  [bold cyan][{num}][/] {target_name}")
    all_choice = len(TARGET_NAMES) + 1
    console.print(f"  [bold cyan][{all_choice}][/] all")
    console.print()

    answer = click.prompt(
        "Choice (comma-separated for multiple, e.g. 1,2)",
        type=str,
        default=str(all_choice),
    )

    picked: List[str] = []
    for token in (part.strip() for part in answer.split(",")):
        try:
            number = int(token)
        except ValueError:
            # Also accept target names typed directly (case-insensitive).
            if token.lower() in TARGET_NAMES:
                picked.append(token.lower())
            continue
        if number == all_choice:
            # "all" short-circuits everything else the user typed.
            return list(TARGET_NAMES)
        if 1 <= number <= len(TARGET_NAMES):
            picked.append(TARGET_NAMES[number - 1])

    if not picked:
        console.print("[yellow]No valid selection, defaulting to all.[/]")
        return list(TARGET_NAMES)

    return picked
|
aes/commands/validate.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""aes validate — Validate .agent/ files against schemas."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import click
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
|
|
10
|
+
from aes.config import AGENT_DIR
|
|
11
|
+
from aes.validator import validate_agent_dir
|
|
12
|
+
|
|
13
|
+
# Shared Rich console used for all validate command output.
console = Console()
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@click.command("validate")
@click.argument("path", default=".", type=click.Path(exists=True))
@click.option("--strict", is_flag=True, help="Promote quality warnings to errors.")
def validate_cmd(path: str, strict: bool) -> None:
    """Validate all .agent/ files against AES schemas.

    PATH is the project root directory (default: current directory).

    Exits with status 1 when any file fails (including warnings promoted
    to errors by --strict).
    """
    project_root = Path(path).resolve()
    agent_dir = project_root / AGENT_DIR

    if not agent_dir.exists():
        console.print(f"[red]Error:[/] No {AGENT_DIR}/ directory found at {project_root}")
        console.print("[dim]Run 'aes init' to create one.[/]")
        raise SystemExit(1)

    console.print(f"[bold]Validating[/] {agent_dir}")
    console.print()

    results = validate_agent_dir(agent_dir)

    passed = 0
    failed = 0
    warnings = 0

    for result in results:
        rel_path = result.file_path.relative_to(project_root)
        # A result that is valid but carries errors represents non-fatal
        # quality warnings from the validator.
        if result.valid and not result.errors:
            console.print(f"  [green]PASS[/] {rel_path}")
            passed += 1
        elif result.valid and not strict:
            console.print(f"  [yellow]WARN[/] {rel_path}")
            for error in result.errors:
                console.print(f"    {error}")
            warnings += 1
        else:
            # Hard failures, plus warnings promoted to errors by --strict.
            console.print(f"  [red]FAIL[/] {rel_path}")
            for error in result.errors:
                console.print(f"    {error}")
            failed += 1

    console.print()
    summary_parts = []
    if passed:
        summary_parts.append(f"{passed} passed")
    if warnings:
        summary_parts.append(f"{warnings} warning(s)")
    if failed:
        summary_parts.append(f"{failed} failed")

    if failed == 0:
        console.print(f"[green]All valid.[/] {', '.join(summary_parts)}.")
    else:
        console.print(f"[red]{', '.join(summary_parts)}.[/]")
        raise SystemExit(1)
|
aes/config.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"""CLI configuration and paths."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
# Resource directories — handle PyInstaller bundles and normal installs
if getattr(sys, "frozen", False):
    # Running as PyInstaller bundle: bundled data is unpacked under _MEIPASS.
    _BASE = Path(sys._MEIPASS)  # type: ignore[attr-defined]
    SCHEMAS_DIR = _BASE / "aes" / "schemas"
    SCAFFOLD_DIR = _BASE / "aes" / "scaffold"
else:
    # Normal install (pip, editable, source) — resources live next to this file.
    SCHEMAS_DIR = Path(__file__).resolve().parent / "schemas"
    SCAFFOLD_DIR = Path(__file__).resolve().parent / "scaffold"

# Standard directory and file names
AGENT_DIR = ".agent"  # project-level directory holding all agent configuration
MANIFEST_FILE = "agent.yaml"  # top-level manifest inside AGENT_DIR
INSTRUCTIONS_FILE = "instructions.md"
PERMISSIONS_FILE = "permissions.yaml"
AGENTIGNORE_FILE = ".agentignore"
AGENT_MD_FILE = "AGENT.md"
SKILLS_DIR = "skills"
VENDOR_DIR = "vendor"
REGISTRY_DIR = "registry"
WORKFLOWS_DIR = "workflows"
COMMANDS_DIR = "commands"
MEMORY_DIR = "memory"
OVERRIDES_DIR = "overrides"
LOCAL_FILE = "local.yaml"  # per-machine overrides (not meant to be committed)
LOCAL_EXAMPLE_FILE = "local.example.yaml"

# Schema file mapping: document kind -> JSON-schema filename in SCHEMAS_DIR
SCHEMA_MAP = {
    "agent": "agent.schema.json",
    "skill": "skill.schema.json",
    "workflow": "workflow.schema.json",
    "registry": "registry.schema.json",
    "permissions": "permissions.schema.json",
}
|