augint-opencodex 0.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- augint_opencodex-0.0.0/PKG-INFO +149 -0
- augint_opencodex-0.0.0/README.md +139 -0
- augint_opencodex-0.0.0/pyproject.toml +110 -0
- augint_opencodex-0.0.0/src/augint_opencodex/__init__.py +3 -0
- augint_opencodex-0.0.0/src/augint_opencodex/cli/__init__.py +1 -0
- augint_opencodex-0.0.0/src/augint_opencodex/cli/__main__.py +35 -0
- augint_opencodex-0.0.0/src/augint_opencodex/cli/commands/__init__.py +1 -0
- augint_opencodex-0.0.0/src/augint_opencodex/cli/commands/doctor.py +57 -0
- augint_opencodex-0.0.0/src/augint_opencodex/cli/commands/sync.py +95 -0
- augint_opencodex-0.0.0/src/augint_opencodex/diagnostics.py +92 -0
- augint_opencodex-0.0.0/src/augint_opencodex/ignore.py +75 -0
- augint_opencodex-0.0.0/src/augint_opencodex/manifest.py +81 -0
- augint_opencodex-0.0.0/src/augint_opencodex/profiles.py +140 -0
- augint_opencodex-0.0.0/src/augint_opencodex/py.typed +0 -0
- augint_opencodex-0.0.0/src/augint_opencodex/renderer.py +213 -0
- augint_opencodex-0.0.0/src/augint_opencodex/sync_engine.py +110 -0
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: augint-opencodex
|
|
3
|
+
Version: 0.0.0
|
|
4
|
+
Summary: Render local Codex and OpenCode repo config from .ai-codex.json
|
|
5
|
+
Author: Augmenting Integrations
|
|
6
|
+
Requires-Dist: click>=8.1.0
|
|
7
|
+
Requires-Dist: pydantic>=2.11.0
|
|
8
|
+
Requires-Python: >=3.13
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
|
|
11
|
+
# augint-opencodex
|
|
12
|
+
|
|
13
|
+
`augint-opencodex` is a Python tool that renders local Codex and OpenCode project
|
|
14
|
+
configuration from a single tracked manifest, `.ai-codex.json`.
|
|
15
|
+
|
|
16
|
+
The first working slice is implemented here. It ships a real `ai-codex` CLI with:
|
|
17
|
+
|
|
18
|
+
- `ai-codex sync` to read `.ai-codex.json` and render `.ai-opencodex.md`,
|
|
19
|
+
`.codex/config.toml`, `opencode.json`, and shared skills
|
|
20
|
+
- `ai-codex doctor` to inspect manifest resolution, generated files, local ignore setup,
|
|
21
|
+
and staged generated artifacts
|
|
22
|
+
- a first-pass profile model with `augint` and `gov`
|
|
23
|
+
- local-only ignore handling through `.git/info/exclude`
|
|
24
|
+
|
|
25
|
+
## Installation
|
|
26
|
+
|
|
27
|
+
For local development:
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
uv sync --group dev
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
Once the package is published, the intended install flows are:
|
|
34
|
+
|
|
35
|
+
```bash
|
|
36
|
+
uvx --from augint-opencodex ai-codex sync
|
|
37
|
+
uv tool install augint-opencodex
|
|
38
|
+
ai-codex sync
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
## Manifest
|
|
42
|
+
|
|
43
|
+
This tool expects a tracked `.ai-codex.json` file in the target repository.
|
|
44
|
+
|
|
45
|
+
```json
|
|
46
|
+
{
|
|
47
|
+
"version": 1,
|
|
48
|
+
"profile": "augint",
|
|
49
|
+
"references": ["./ai-lls-lib"],
|
|
50
|
+
"blocked_paths": [
|
|
51
|
+
"**/secrets/**",
|
|
52
|
+
"**/*.pem",
|
|
53
|
+
"**/terraform.tfstate*"
|
|
54
|
+
],
|
|
55
|
+
"content_policy": {
|
|
56
|
+
"no_emojis": true,
|
|
57
|
+
"no_ai_mentions": true
|
|
58
|
+
},
|
|
59
|
+
"shell_guardrails": {
|
|
60
|
+
"ask": ["aws *", "terraform *", "kubectl *", "git push *"],
|
|
61
|
+
"deny": ["aws iam create*", "aws iam put*"]
|
|
62
|
+
},
|
|
63
|
+
"patterns": {
|
|
64
|
+
"org_python_library": true
|
|
65
|
+
},
|
|
66
|
+
"opencode": {
|
|
67
|
+
"enabled": true
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
The current schema lives in [`schemas/ai-codex.schema.json`](schemas/ai-codex.schema.json).
|
|
73
|
+
|
|
74
|
+
## Commands
|
|
75
|
+
|
|
76
|
+
Render files into the current repository:
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
uv run ai-codex sync
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
Preview pending changes without writing:
|
|
83
|
+
|
|
84
|
+
```bash
|
|
85
|
+
uv run ai-codex sync --dry-run
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
Fail if the repo is out of sync:
|
|
89
|
+
|
|
90
|
+
```bash
|
|
91
|
+
uv run ai-codex sync --check
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
Inspect the current repo state:
|
|
95
|
+
|
|
96
|
+
```bash
|
|
97
|
+
uv run ai-codex doctor
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
## Generated Files
|
|
101
|
+
|
|
102
|
+
The first slice writes:
|
|
103
|
+
|
|
104
|
+
- `.ai-opencodex.md`
|
|
105
|
+
- `.codex/config.toml`
|
|
106
|
+
- `opencode.json`
|
|
107
|
+
- `.agents/skills/README.md`
|
|
108
|
+
- `.agents/skills/org-python-tooling/SKILL.md` when
|
|
109
|
+
`patterns.org_python_library` is enabled
|
|
110
|
+
|
|
111
|
+
Generated outputs are added to `.git/info/exclude` by default so target repositories do not need
|
|
112
|
+
to commit them.
|
|
113
|
+
|
|
114
|
+
## Dogfooding This Repo
|
|
115
|
+
|
|
116
|
+
This repository is set up to dogfood the generated instructions flow without a root `AGENTS.md`.
|
|
117
|
+
|
|
118
|
+
1. Keep `.ai-codex.json` tracked in the repo root.
|
|
119
|
+
2. Run `uv run ai-codex sync` to generate `.ai-opencodex.md`, `.codex/config.toml`, and the other
|
|
120
|
+
local-only artifacts.
|
|
121
|
+
3. Start Codex with `CODEX_HOME=$(pwd)/.codex codex` so Codex uses the generated
|
|
122
|
+
`.codex/config.toml` as its home config and discovers `.ai-opencodex.md` via
|
|
123
|
+
`project_doc_fallback_filenames`.
|
|
124
|
+
|
|
125
|
+
Avoid creating a root `AGENTS.md` here. Codex checks `AGENTS.md` before fallback filenames in the
|
|
126
|
+
same directory, so a root `AGENTS.md` would shadow `.ai-opencodex.md` and split Codex from the
|
|
127
|
+
generated OpenCode instructions.
|
|
128
|
+
|
|
129
|
+
## Organizational Python Standard
|
|
130
|
+
|
|
131
|
+
This project uses `ai-lls-lib/` in the planning repo as the concrete reference for the
|
|
132
|
+
organization-wide Python package and tooling standard:
|
|
133
|
+
|
|
134
|
+
- `uv`-first packaging and development workflow
|
|
135
|
+
- `src/` layout and console scripts from `[project.scripts]`
|
|
136
|
+
- `ruff`, `mypy`, `pytest`, and `pre-commit`
|
|
137
|
+
- security and compliance checks in CI
|
|
138
|
+
- Conventional Commit and semantic-release-compatible versioning
|
|
139
|
+
- a stable Makefile task surface
|
|
140
|
+
|
|
141
|
+
## Development
|
|
142
|
+
|
|
143
|
+
```bash
|
|
144
|
+
make install
|
|
145
|
+
make test
|
|
146
|
+
make format
|
|
147
|
+
make typecheck
|
|
148
|
+
make build
|
|
149
|
+
```
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
# augint-opencodex
|
|
2
|
+
|
|
3
|
+
`augint-opencodex` is a Python tool that renders local Codex and OpenCode project
|
|
4
|
+
configuration from a single tracked manifest, `.ai-codex.json`.
|
|
5
|
+
|
|
6
|
+
The first working slice is implemented here. It ships a real `ai-codex` CLI with:
|
|
7
|
+
|
|
8
|
+
- `ai-codex sync` to read `.ai-codex.json` and render `.ai-opencodex.md`,
|
|
9
|
+
`.codex/config.toml`, `opencode.json`, and shared skills
|
|
10
|
+
- `ai-codex doctor` to inspect manifest resolution, generated files, local ignore setup,
|
|
11
|
+
and staged generated artifacts
|
|
12
|
+
- a first-pass profile model with `augint` and `gov`
|
|
13
|
+
- local-only ignore handling through `.git/info/exclude`
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
For local development:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
uv sync --group dev
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
Once the package is published, the intended install flows are:
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
uvx --from augint-opencodex ai-codex sync
|
|
27
|
+
uv tool install augint-opencodex
|
|
28
|
+
ai-codex sync
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## Manifest
|
|
32
|
+
|
|
33
|
+
This tool expects a tracked `.ai-codex.json` file in the target repository.
|
|
34
|
+
|
|
35
|
+
```json
|
|
36
|
+
{
|
|
37
|
+
"version": 1,
|
|
38
|
+
"profile": "augint",
|
|
39
|
+
"references": ["./ai-lls-lib"],
|
|
40
|
+
"blocked_paths": [
|
|
41
|
+
"**/secrets/**",
|
|
42
|
+
"**/*.pem",
|
|
43
|
+
"**/terraform.tfstate*"
|
|
44
|
+
],
|
|
45
|
+
"content_policy": {
|
|
46
|
+
"no_emojis": true,
|
|
47
|
+
"no_ai_mentions": true
|
|
48
|
+
},
|
|
49
|
+
"shell_guardrails": {
|
|
50
|
+
"ask": ["aws *", "terraform *", "kubectl *", "git push *"],
|
|
51
|
+
"deny": ["aws iam create*", "aws iam put*"]
|
|
52
|
+
},
|
|
53
|
+
"patterns": {
|
|
54
|
+
"org_python_library": true
|
|
55
|
+
},
|
|
56
|
+
"opencode": {
|
|
57
|
+
"enabled": true
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
The current schema lives in [`schemas/ai-codex.schema.json`](schemas/ai-codex.schema.json).
|
|
63
|
+
|
|
64
|
+
## Commands
|
|
65
|
+
|
|
66
|
+
Render files into the current repository:
|
|
67
|
+
|
|
68
|
+
```bash
|
|
69
|
+
uv run ai-codex sync
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
Preview pending changes without writing:
|
|
73
|
+
|
|
74
|
+
```bash
|
|
75
|
+
uv run ai-codex sync --dry-run
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
Fail if the repo is out of sync:
|
|
79
|
+
|
|
80
|
+
```bash
|
|
81
|
+
uv run ai-codex sync --check
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
Inspect the current repo state:
|
|
85
|
+
|
|
86
|
+
```bash
|
|
87
|
+
uv run ai-codex doctor
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## Generated Files
|
|
91
|
+
|
|
92
|
+
The first slice writes:
|
|
93
|
+
|
|
94
|
+
- `.ai-opencodex.md`
|
|
95
|
+
- `.codex/config.toml`
|
|
96
|
+
- `opencode.json`
|
|
97
|
+
- `.agents/skills/README.md`
|
|
98
|
+
- `.agents/skills/org-python-tooling/SKILL.md` when
|
|
99
|
+
`patterns.org_python_library` is enabled
|
|
100
|
+
|
|
101
|
+
Generated outputs are added to `.git/info/exclude` by default so target repositories do not need
|
|
102
|
+
to commit them.
|
|
103
|
+
|
|
104
|
+
## Dogfooding This Repo
|
|
105
|
+
|
|
106
|
+
This repository is set up to dogfood the generated instructions flow without a root `AGENTS.md`.
|
|
107
|
+
|
|
108
|
+
1. Keep `.ai-codex.json` tracked in the repo root.
|
|
109
|
+
2. Run `uv run ai-codex sync` to generate `.ai-opencodex.md`, `.codex/config.toml`, and the other
|
|
110
|
+
local-only artifacts.
|
|
111
|
+
3. Start Codex with `CODEX_HOME=$(pwd)/.codex codex` so Codex uses the generated
|
|
112
|
+
`.codex/config.toml` as its home config and discovers `.ai-opencodex.md` via
|
|
113
|
+
`project_doc_fallback_filenames`.
|
|
114
|
+
|
|
115
|
+
Avoid creating a root `AGENTS.md` here. Codex checks `AGENTS.md` before fallback filenames in the
|
|
116
|
+
same directory, so a root `AGENTS.md` would shadow `.ai-opencodex.md` and split Codex from the
|
|
117
|
+
generated OpenCode instructions.
|
|
118
|
+
|
|
119
|
+
## Organizational Python Standard
|
|
120
|
+
|
|
121
|
+
This project uses `ai-lls-lib/` in the planning repo as the concrete reference for the
|
|
122
|
+
organization-wide Python package and tooling standard:
|
|
123
|
+
|
|
124
|
+
- `uv`-first packaging and development workflow
|
|
125
|
+
- `src/` layout and console scripts from `[project.scripts]`
|
|
126
|
+
- `ruff`, `mypy`, `pytest`, and `pre-commit`
|
|
127
|
+
- security and compliance checks in CI
|
|
128
|
+
- Conventional Commit and semantic-release-compatible versioning
|
|
129
|
+
- a stable Makefile task surface
|
|
130
|
+
|
|
131
|
+
## Development
|
|
132
|
+
|
|
133
|
+
```bash
|
|
134
|
+
make install
|
|
135
|
+
make test
|
|
136
|
+
make format
|
|
137
|
+
make typecheck
|
|
138
|
+
make build
|
|
139
|
+
```
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "augint-opencodex"
|
|
3
|
+
version = "0.0.0"
|
|
4
|
+
description = "Render local Codex and OpenCode repo config from .ai-codex.json"
|
|
5
|
+
authors = [{ name = "Augmenting Integrations" }]
|
|
6
|
+
readme = "README.md"
|
|
7
|
+
requires-python = ">=3.13"
|
|
8
|
+
dependencies = [
|
|
9
|
+
"click>=8.1.0",
|
|
10
|
+
"pydantic>=2.11.0",
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
[project.scripts]
|
|
14
|
+
ai-codex = "augint_opencodex.cli.__main__:main"
|
|
15
|
+
ai-opencodex = "augint_opencodex.cli.__main__:main"
|
|
16
|
+
ai-opencode = "augint_opencodex.cli.__main__:main"
|
|
17
|
+
|
|
18
|
+
[build-system]
|
|
19
|
+
requires = ["uv_build>=0.9"]
|
|
20
|
+
build-backend = "uv_build"
|
|
21
|
+
|
|
22
|
+
[tool.ruff]
|
|
23
|
+
line-length = 100
|
|
24
|
+
target-version = "py313"
|
|
25
|
+
|
|
26
|
+
[tool.ruff.lint]
|
|
27
|
+
select = ["E", "F", "I", "W", "B", "C4", "UP", "DTZ"]
|
|
28
|
+
ignore = ["E501"]
|
|
29
|
+
|
|
30
|
+
[tool.ruff.lint.isort]
|
|
31
|
+
known-first-party = ["augint_opencodex"]
|
|
32
|
+
|
|
33
|
+
[tool.mypy]
|
|
34
|
+
python_version = "3.13"
|
|
35
|
+
warn_return_any = true
|
|
36
|
+
warn_unused_configs = true
|
|
37
|
+
disallow_untyped_defs = true
|
|
38
|
+
|
|
39
|
+
[[tool.mypy.overrides]]
|
|
40
|
+
module = "augint_opencodex.cli.*"
|
|
41
|
+
disallow_untyped_defs = false
|
|
42
|
+
|
|
43
|
+
[tool.pytest.ini_options]
|
|
44
|
+
testpaths = ["tests"]
|
|
45
|
+
python_files = ["test_*.py"]
|
|
46
|
+
addopts = "-ra -q --strict-markers"
|
|
47
|
+
|
|
48
|
+
[tool.coverage.run]
|
|
49
|
+
source = ["src"]
|
|
50
|
+
omit = ["*/tests/*", "*/test_*.py"]
|
|
51
|
+
|
|
52
|
+
[tool.semantic_release]
|
|
53
|
+
assets = ["uv.lock"]
|
|
54
|
+
commit_message = "chore(release): augint-opencodex {version}\n\nAutomatically generated by python-semantic-release [skip ci]"
|
|
55
|
+
commit_parser = "angular"
|
|
56
|
+
logging_use_named_masks = false
|
|
57
|
+
major_on_zero = false
|
|
58
|
+
allow_zero_version = true
|
|
59
|
+
no_git_verify = false
|
|
60
|
+
tag_format = "augint-opencodex-v{version}"
|
|
61
|
+
version_toml = ["pyproject.toml:project.version"]
|
|
62
|
+
version_variables = ["src/augint_opencodex/__init__.py:__version__"]
|
|
63
|
+
build_command = "uv lock && uv build"
|
|
64
|
+
|
|
65
|
+
[tool.semantic_release.branches.main]
|
|
66
|
+
match = "main"
|
|
67
|
+
prerelease = false
|
|
68
|
+
|
|
69
|
+
[tool.semantic_release.changelog]
|
|
70
|
+
mode = "update"
|
|
71
|
+
changelog_file = "CHANGELOG.md"
|
|
72
|
+
exclude_commit_patterns = [
|
|
73
|
+
'''chore(?:\([^)]*?\))?: .+''',
|
|
74
|
+
'''ci(?:\([^)]*?\))?: .+''',
|
|
75
|
+
'''refactor(?:\([^)]*?\))?: .+''',
|
|
76
|
+
'''style(?:\([^)]*?\))?: .+''',
|
|
77
|
+
'''test(?:\([^)]*?\))?: .+''',
|
|
78
|
+
'''build\((?!deps\): .+)''',
|
|
79
|
+
'''Initial [Cc]ommit.*''',
|
|
80
|
+
]
|
|
81
|
+
|
|
82
|
+
[tool.semantic_release.remote]
|
|
83
|
+
name = "origin"
|
|
84
|
+
type = "github"
|
|
85
|
+
ignore_token_for_push = false
|
|
86
|
+
insecure = false
|
|
87
|
+
|
|
88
|
+
[tool.semantic_release.remote.token]
|
|
89
|
+
env = "GH_TOKEN"
|
|
90
|
+
|
|
91
|
+
[tool.semantic_release.publish]
|
|
92
|
+
dist_glob_patterns = ["dist/*"]
|
|
93
|
+
upload_to_vcs_release = true
|
|
94
|
+
|
|
95
|
+
[dependency-groups]
|
|
96
|
+
dev = [
|
|
97
|
+
"augint-github>=1.12.0",
|
|
98
|
+
"augint-shell>=0.72.0",
|
|
99
|
+
"augint-tools>=4.10.0",
|
|
100
|
+
"bandit>=1.8.0",
|
|
101
|
+
"mypy>=1.16.0",
|
|
102
|
+
"pip-audit>=2.9.0",
|
|
103
|
+
"pip-licenses>=5.0.0",
|
|
104
|
+
"pre-commit>=4.2.0",
|
|
105
|
+
"pytest>=8.3.0",
|
|
106
|
+
"pytest-cov>=6.0.0",
|
|
107
|
+
"pytest-html>=4.1.0",
|
|
108
|
+
"python-semantic-release>=10.4.0",
|
|
109
|
+
"ruff>=0.11.0",
|
|
110
|
+
]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""CLI package for augint-opencodex."""
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
|
|
5
|
+
import click
|
|
6
|
+
|
|
7
|
+
from augint_opencodex import __version__
|
|
8
|
+
from augint_opencodex.cli.commands import doctor, sync
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@click.group()
@click.version_option(version=__version__, prog_name="ai-codex")
def cli() -> None:
    """Render local Codex and OpenCode repo config from `.ai-codex.json`."""


# Register subcommands at import time so every console-script alias and
# `python -m` invocation sees the same command set.
cli.add_command(sync.sync_command)
cli.add_command(doctor.doctor_command)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def main() -> None:
    """Run the `ai-codex` CLI.

    Wraps the click group so that ClickExceptions are rendered with their
    own exit code and any other error becomes a plain message plus exit 1.
    """
    try:
        cli()
        return
    except click.ClickException as exc:
        exc.show()
        code = exc.exit_code
    except Exception as exc:  # pragma: no cover - defensive CLI boundary
        click.echo(f"Error: {exc}", err=True)
        code = 1
    sys.exit(code)


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""CLI commands."""
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
import click
|
|
6
|
+
|
|
7
|
+
from augint_opencodex.diagnostics import inspect_project
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@click.command(name="doctor")
@click.option(
    "--project-root",
    type=click.Path(file_okay=False, path_type=Path),
    default=Path("."),
    show_default=True,
    help="Repository root to inspect.",
)
@click.option(
    "--manifest",
    type=click.Path(dir_okay=False, path_type=Path),
    help="Explicit manifest path. Defaults to PROJECT_ROOT/.ai-codex.json.",
)
def doctor_command(project_root: Path, manifest: Path | None) -> None:
    """Inspect manifest resolution and generated file state."""

    report = inspect_project(project_root, manifest_path=manifest)

    # Header: where the manifest resolved and which profile it selected.
    click.echo(f"Project root: {report.policy.project_root}")
    click.echo(f"Manifest: {report.policy.manifest_path}")
    click.echo(f"Profile: {report.policy.profile_name}")

    # One line per expected artifact, flagging anything absent on disk.
    for path in report.expected_files:
        label = "missing" if path in report.missing_files else "ok"
        click.echo(f"{label}: {path.as_posix()}")

    # Managed files that exist but are no longer expected.
    for path in report.stale_files:
        click.echo(f"stale: {path.as_posix()}")

    exclude = report.exclude_update
    if not exclude.available:
        click.echo("git exclude: unavailable")
    elif exclude.needs_update:
        click.echo("git exclude: missing ai-codex block")
    else:
        click.echo("git exclude: ok")

    if report.root_agents_present:
        click.echo(
            "warning: root AGENTS.md is present and will shadow .ai-opencodex.md for Codex discovery"
        )

    if report.staged_generated_files:
        for path in report.staged_generated_files:
            click.echo(f"warning: staged generated file {path.as_posix()}")
    else:
        click.echo("staged generated files: none")

    if report.missing_files or report.stale_files:
        click.echo("Run `ai-codex sync` to reconcile missing or stale generated files.")
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Literal
|
|
5
|
+
|
|
6
|
+
import click
|
|
7
|
+
|
|
8
|
+
from augint_opencodex.sync_engine import SyncReport, sync_project
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@click.command(name="sync")
@click.option(
    "--project-root",
    type=click.Path(file_okay=False, path_type=Path),
    default=Path("."),
    show_default=True,
    help="Repository root to sync.",
)
@click.option(
    "--manifest",
    type=click.Path(dir_okay=False, path_type=Path),
    help="Explicit manifest path. Defaults to PROJECT_ROOT/.ai-codex.json.",
)
@click.option("--dry-run", is_flag=True, help="Preview changes without writing files.")
@click.option("--check", is_flag=True, help="Exit non-zero when generated files are out of sync.")
def sync_command(
    project_root: Path,
    manifest: Path | None,
    dry_run: bool,
    check: bool,
) -> None:
    """Render project-local Codex and OpenCode files."""

    if dry_run and check:
        raise click.ClickException("`--dry-run` and `--check` cannot be used together.")

    # Pick the engine mode from the mutually-exclusive flags.
    mode: Literal["write", "dry-run", "check"]
    if check:
        mode = "check"
    elif dry_run:
        mode = "dry-run"
    else:
        mode = "write"

    report = sync_project(project_root, manifest_path=manifest, mode=mode)
    _print_report(report)

    # In check mode a dirty tree is a failure, so CI can gate on it.
    if check and report.needs_changes:
        raise click.ClickException("Generated files are out of sync. Run `ai-codex sync`.")
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _print_report(report: SyncReport) -> None:
    """Echo a human-readable summary of one sync run."""
    root = report.policy.project_root
    click.echo(f"Project root: {root}")
    click.echo(f"Manifest: {report.policy.manifest_path}")
    click.echo(f"Profile: {report.policy.profile_name}")

    if report.changed_files:
        # Verb depends on whether the engine wrote, previewed, or checked.
        change_verbs = {"write": "wrote", "dry-run": "would write", "check": "needs update"}
        for changed in report.changed_files:
            click.echo(f"{change_verbs[report.mode]}: {_display_path(changed)}")
    else:
        click.echo("Generated files are already in sync.")

    if report.stale_files:
        removal_verbs = {"write": "removed", "dry-run": "would remove", "check": "needs removal"}
        for stale in report.stale_files:
            click.echo(f"{removal_verbs[report.mode]}: {_display_path(stale)}")

    if report.unchanged_files:
        click.echo(f"unchanged: {len(report.unchanged_files)} file(s)")

    exclude_update = report.exclude_update
    if not exclude_update.available:
        click.echo("git exclude: skipped (no .git/info directory found)")
    elif exclude_update.updated:
        click.echo(f"git exclude: updated {_display_path(exclude_update.path, root)}")
    elif exclude_update.needs_update:
        click.echo(f"git exclude: would update {_display_path(exclude_update.path, root)}")
    else:
        click.echo("git exclude: already current")
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def _display_path(path: Path | None, base: Path | None = None) -> str:
|
|
88
|
+
if path is None:
|
|
89
|
+
return "<missing>"
|
|
90
|
+
if base is None:
|
|
91
|
+
return path.as_posix()
|
|
92
|
+
try:
|
|
93
|
+
return path.relative_to(base).as_posix()
|
|
94
|
+
except ValueError:
|
|
95
|
+
return path.as_posix()
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import subprocess
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from augint_opencodex.ignore import ExcludeUpdate, ensure_git_exclude
|
|
8
|
+
from augint_opencodex.manifest import load_manifest
|
|
9
|
+
from augint_opencodex.profiles import ResolvedPolicy, resolve_policy
|
|
10
|
+
from augint_opencodex.renderer import managed_file_paths, render_project
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
class DoctorReport:
    """Diagnostics for a target repository."""

    policy: ResolvedPolicy  # effective policy resolved from the manifest
    expected_files: tuple[Path, ...]  # every path the renderer would produce
    missing_files: tuple[Path, ...]  # expected paths absent on disk
    stale_files: tuple[Path, ...]  # managed paths on disk that are no longer expected
    exclude_update: ExcludeUpdate  # read-only probe of .git/info/exclude state
    staged_generated_files: tuple[Path, ...]  # generated paths staged in git's index
    root_agents_present: bool  # True when a root AGENTS.md exists
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def inspect_project(project_root: Path, manifest_path: Path | None = None) -> DoctorReport:
    """Inspect the current project and report sync-related state.

    Loads and validates the manifest, renders the expected file set in
    memory, and compares it against what is actually on disk.
    """
    root = project_root.resolve()
    manifest_file = _resolve_manifest_path(root, manifest_path)
    policy = resolve_policy(load_manifest(manifest_file), root, manifest_file)

    rendered = render_project(policy)
    expected = tuple(item.path for item in rendered)
    missing = tuple(item.path for item in rendered if not (root / item.path).exists())
    # Stale = a path we manage that exists but is no longer part of the render set.
    stale = tuple(
        candidate
        for candidate in managed_file_paths()
        if candidate not in expected and (root / candidate).exists()
    )

    return DoctorReport(
        policy=policy,
        expected_files=expected,
        missing_files=missing,
        stale_files=stale,
        exclude_update=ensure_git_exclude(root, write=False),
        staged_generated_files=_find_staged_generated_files(root, managed_file_paths()),
        root_agents_present=(root / "AGENTS.md").exists(),
    )
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _resolve_manifest_path(project_root: Path, manifest_path: Path | None) -> Path:
|
|
61
|
+
if manifest_path is not None:
|
|
62
|
+
return manifest_path.resolve()
|
|
63
|
+
return (project_root / ".ai-codex.json").resolve()
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _find_staged_generated_files(
|
|
67
|
+
project_root: Path, expected_files: tuple[Path, ...]
|
|
68
|
+
) -> tuple[Path, ...]:
|
|
69
|
+
if not (project_root / ".git").exists():
|
|
70
|
+
return ()
|
|
71
|
+
|
|
72
|
+
command = [
|
|
73
|
+
"git",
|
|
74
|
+
"-C",
|
|
75
|
+
str(project_root),
|
|
76
|
+
"diff",
|
|
77
|
+
"--cached",
|
|
78
|
+
"--name-only",
|
|
79
|
+
"--",
|
|
80
|
+
*(path.as_posix() for path in expected_files),
|
|
81
|
+
]
|
|
82
|
+
result = subprocess.run(
|
|
83
|
+
command,
|
|
84
|
+
capture_output=True,
|
|
85
|
+
text=True,
|
|
86
|
+
check=False,
|
|
87
|
+
)
|
|
88
|
+
if result.returncode not in (0, 1):
|
|
89
|
+
return ()
|
|
90
|
+
|
|
91
|
+
staged_files = [line.strip() for line in result.stdout.splitlines() if line.strip()]
|
|
92
|
+
return tuple(Path(staged_file) for staged_file in staged_files)
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
GENERATED_IGNORE_PATTERNS = (
|
|
7
|
+
"/.ai-opencodex.md",
|
|
8
|
+
"/.ai-codex.md",
|
|
9
|
+
"/.codex/",
|
|
10
|
+
"/opencode.json",
|
|
11
|
+
"/.opencode/",
|
|
12
|
+
"/.agents/skills/",
|
|
13
|
+
)
|
|
14
|
+
BLOCK_START = "# BEGIN ai-codex generated"
|
|
15
|
+
BLOCK_END = "# END ai-codex generated"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass(frozen=True)
class ExcludeUpdate:
    """The state of `.git/info/exclude` for generated files."""

    path: Path | None  # location of the exclude file (the file itself may not exist yet)
    available: bool  # False when the .git/info directory is missing entirely
    needs_update: bool  # True when the generated ignore block is absent or stale
    updated: bool  # True only when this call actually rewrote the file
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def ensure_git_exclude(project_root: Path, *, write: bool) -> ExcludeUpdate:
    """Add or refresh the generated ignore block in `.git/info/exclude`.

    With ``write=False`` this is a pure probe: it reports whether the file
    would change without touching the filesystem.
    """
    exclude_path = project_root / ".git" / "info" / "exclude"

    # Without a .git/info directory there is no local exclude file to manage.
    if not exclude_path.parent.exists():
        return ExcludeUpdate(path=exclude_path, available=False, needs_update=False, updated=False)

    current = exclude_path.read_text(encoding="utf-8") if exclude_path.exists() else ""
    desired = _merge_block(current)
    out_of_date = current != desired

    if write and out_of_date:
        exclude_path.parent.mkdir(parents=True, exist_ok=True)
        exclude_path.write_text(desired, encoding="utf-8")

    return ExcludeUpdate(
        path=exclude_path,
        available=True,
        needs_update=out_of_date,
        updated=write and out_of_date,
    )
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _merge_block(existing: str) -> str:
    """Return *existing* exclude-file text with the generated block inserted or refreshed.

    The managed block is delimited by BLOCK_START/BLOCK_END. If a well-ordered
    block is present it is replaced in place; otherwise the block is appended.
    Surrounding content is preserved, separated by blank lines.
    """
    generated_block = "\n".join((BLOCK_START, *GENERATED_IGNORE_PATTERNS, BLOCK_END))

    # Use offset-aware find() rather than two `in` checks + index(): the old
    # form raised ValueError when BLOCK_END occurred only *before* BLOCK_START.
    start = existing.find(BLOCK_START)
    end_marker = existing.find(BLOCK_END, start) if start != -1 else -1
    if start != -1 and end_marker != -1:
        end = end_marker + len(BLOCK_END)
        prefix = existing[:start].rstrip("\n")
        suffix = existing[end:].lstrip("\n")
        pieces = [part for part in (prefix, generated_block, suffix) if part]
        return "\n\n".join(pieces) + "\n"

    # No (valid) existing block: append it after any pre-existing content.
    stripped_existing = existing.strip("\n")
    pieces = [part for part in (stripped_existing, generated_block) if part]
    return "\n\n".join(pieces) + "\n"
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Literal
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ManifestError(ValueError):
    """Raised when `.ai-codex.json` cannot be loaded or validated.

    Subclasses ValueError so callers may catch either type.
    """
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class ContentPolicy(BaseModel):
    """Output-style constraints applied by the generated profile."""

    # Reject unknown manifest keys so typos fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    no_emojis: bool = True  # policy flag: disallow emoji in generated content
    no_ai_mentions: bool = True  # policy flag: disallow AI-authorship mentions
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ShellGuardrails(BaseModel):
    """Shell command patterns that should prompt or be denied."""

    # Reject unknown manifest keys so typos fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    ask: list[str] = Field(default_factory=list)  # patterns that require approval
    deny: list[str] = Field(default_factory=list)  # patterns that are refused outright
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class PatternFlags(BaseModel):
    """Named organizational patterns that can influence generated output."""

    # Reject unknown manifest keys so typos fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    # Opt-in to the org-wide Python library pattern (adds a shared skill).
    org_python_library: bool = False
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class OpenCodeConfig(BaseModel):
    """Compatibility settings for OpenCode generation."""

    # Reject unknown manifest keys so typos fail validation loudly.
    model_config = ConfigDict(extra="forbid")

    enabled: bool = True  # whether OpenCode artifacts are generated at all
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class Manifest(BaseModel):
    """Tracked repo manifest used to render local agent config.

    Mirrors the shape of `.ai-codex.json`; unknown keys are rejected.
    """

    # populate_by_name lets code construct the model via `schema_uri`
    # while JSON input uses the `$schema` alias.
    model_config = ConfigDict(extra="forbid", populate_by_name=True)

    schema_uri: str | None = Field(default=None, alias="$schema")  # optional JSON-schema pointer
    version: Literal[1]  # manifest format version; only 1 is accepted
    profile: Literal["augint", "gov"]  # named baseline profile
    references: list[str] = Field(default_factory=list)  # reference paths, e.g. "./ai-lls-lib"
    blocked_paths: list[str] = Field(default_factory=list)  # path glob patterns to block
    content_policy: ContentPolicy = Field(default_factory=ContentPolicy)
    shell_guardrails: ShellGuardrails = Field(default_factory=ShellGuardrails)
    patterns: PatternFlags = Field(default_factory=PatternFlags)
    opencode: OpenCodeConfig = Field(default_factory=OpenCodeConfig)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def load_manifest(path: Path) -> Manifest:
    """Load and validate a manifest file from disk.

    Args:
        path: Location of the `.ai-codex.json` manifest.

    Returns:
        The validated ``Manifest`` model.

    Raises:
        ManifestError: If the file is missing, unreadable, not valid JSON,
            or fails schema validation.
    """

    resolved_path = path.resolve()

    # EAFP: read directly instead of an exists() pre-check, which is racy —
    # the file could disappear between the check and the read.
    try:
        raw_text = resolved_path.read_text(encoding="utf-8")
    except FileNotFoundError as exc:
        raise ManifestError(f"Manifest not found: {resolved_path}") from exc
    except OSError as exc:
        raise ManifestError(f"Unable to read manifest: {resolved_path}") from exc

    try:
        payload = json.loads(raw_text)
    except json.JSONDecodeError as exc:
        raise ManifestError(f"Manifest is not valid JSON: {resolved_path}") from exc

    try:
        return Manifest.model_validate(payload)
    except ValidationError as exc:
        raise ManifestError(f"Manifest validation failed for {resolved_path}: {exc}") from exc
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
from augint_opencodex.manifest import Manifest
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@dataclass(frozen=True)
class ProfileDefinition:
    """Resolved defaults for a named profile before manifest overrides."""

    # Profile identifier, matching the manifest's `profile` field.
    name: str
    # One-line human summary surfaced in generated instructions.
    summary: str
    # Profile-specific bullets appended after the baseline instruction bullets.
    instruction_bullets: tuple[str, ...]
    # Glob patterns agents should treat as off-limits.
    blocked_paths: tuple[str, ...]
    # Shell patterns that should prompt for approval before running.
    ask_patterns: tuple[str, ...]
    # Shell patterns that should be denied outright.
    deny_patterns: tuple[str, ...]
    # Codex settings rendered into `.codex/config.toml`.
    approval_policy: str = "on-request"
    sandbox_mode: str = "workspace-write"
    web_search: str = "live"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@dataclass(frozen=True)
class ResolvedPolicy:
    """Effective policy after layering baseline, profile, and manifest settings."""

    # Where the project lives and where its manifest was loaded from.
    project_root: Path
    manifest_path: Path
    # Identity of the active profile, copied from its ProfileDefinition.
    profile_name: str
    profile_summary: str
    # Merged, de-duplicated lists: profile defaults first, then manifest extras.
    references: tuple[str, ...]
    blocked_paths: tuple[str, ...]
    ask_patterns: tuple[str, ...]
    deny_patterns: tuple[str, ...]
    # Content-policy switches taken directly from the manifest.
    no_emojis: bool
    no_ai_mentions: bool
    # Feature flags from the manifest.
    org_python_library: bool
    opencode_enabled: bool
    # Codex settings rendered into `.codex/config.toml`.
    codex_approval_policy: str
    codex_sandbox_mode: str
    codex_web_search: str
    # Baseline bullets followed by the profile's own bullets.
    instruction_bullets: tuple[str, ...]
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
# Instruction bullets shared by every profile; resolve_policy prepends these
# to the active profile's own bullets.
BASELINE_INSTRUCTION_BULLETS = (
    "Use `.ai-codex.json` as the tracked source of truth for local agent configuration.",
    "Use `.ai-opencodex.md` as the generated repo instruction file.",
    "Avoid a root `AGENTS.md` in the repository root because it shadows fallback instruction filenames for Codex discovery.",
    "Treat `.codex/config.toml`, `opencode.json`, and `.agents/skills/` as generated local artifacts.",
    "Re-run `ai-codex sync` after manifest changes instead of editing generated files by hand.",
)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
# Built-in profiles keyed by the manifest's `profile` value. These supply the
# defaults that resolve_policy merges with manifest overrides.
PROFILE_DEFINITIONS: dict[str, ProfileDefinition] = {
    "augint": ProfileDefinition(
        name="augint",
        summary="Standard Augmenting Integrations work profile with live web access and local agent guidance.",
        instruction_bullets=(
            "Prefer practical, minimal changes that preserve the existing repository's patterns.",
            "Treat shell guardrails as real approval prompts for risky operational commands.",
        ),
        blocked_paths=(
            "**/secrets/**",
            "**/*.pem",
            "**/terraform.tfstate*",
        ),
        ask_patterns=(
            "aws *",
            "terraform *",
            "kubectl *",
            "git push *",
        ),
        deny_patterns=(),
    ),
    # The gov profile is a stricter superset: more blocked paths, more ask
    # patterns, and a non-empty deny list.
    "gov": ProfileDefinition(
        name="gov",
        summary="Stricter overlay for compliance-sensitive repositories with broader defaults and more conservative command handling.",
        instruction_bullets=(
            "Use the same working model as `augint`, but be more conservative with compliance-sensitive changes.",
            "Escalate or slow down when commands or outputs could affect regulated systems or sensitive data.",
        ),
        blocked_paths=(
            "**/secrets/**",
            "**/*.pem",
            "**/*.key",
            "**/*.p12",
            "**/terraform.tfstate*",
            "**/.aws/**",
        ),
        ask_patterns=(
            "aws *",
            "terraform *",
            "kubectl *",
            "git push *",
            "az *",
            "gcloud *",
            "gh secret *",
        ),
        deny_patterns=(
            "aws iam create*",
            "aws iam put*",
            "terraform destroy *",
        ),
    ),
}
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def resolve_policy(manifest: Manifest, project_root: Path, manifest_path: Path) -> ResolvedPolicy:
    """Merge baseline policy, profile defaults, and manifest settings.

    Args:
        manifest: Validated manifest loaded from `.ai-codex.json`.
        project_root: Directory the generated files will be written under.
        manifest_path: Location the manifest was loaded from.

    Returns:
        The effective, immutable policy consumed by the renderer.
    """

    profile = PROFILE_DEFINITIONS[manifest.profile]
    return ResolvedPolicy(
        project_root=project_root.resolve(),
        manifest_path=manifest_path.resolve(),
        profile_name=profile.name,
        profile_summary=profile.summary,
        references=_dedupe(manifest.references),
        # Profile defaults come first so manifest entries extend, not replace,
        # and _dedupe keeps the first occurrence of any repeated pattern.
        blocked_paths=_dedupe(profile.blocked_paths + tuple(manifest.blocked_paths)),
        ask_patterns=_dedupe(profile.ask_patterns + tuple(manifest.shell_guardrails.ask)),
        deny_patterns=_dedupe(profile.deny_patterns + tuple(manifest.shell_guardrails.deny)),
        no_emojis=manifest.content_policy.no_emojis,
        no_ai_mentions=manifest.content_policy.no_ai_mentions,
        org_python_library=manifest.patterns.org_python_library,
        opencode_enabled=manifest.opencode.enabled,
        codex_approval_policy=profile.approval_policy,
        codex_sandbox_mode=profile.sandbox_mode,
        codex_web_search=profile.web_search,
        # Baseline bullets always precede the profile-specific bullets.
        instruction_bullets=BASELINE_INSTRUCTION_BULLETS + profile.instruction_bullets,
    )
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def _dedupe(values: tuple[str, ...] | list[str]) -> tuple[str, ...]:
|
|
134
|
+
seen: set[str] = set()
|
|
135
|
+
ordered: list[str] = []
|
|
136
|
+
for value in values:
|
|
137
|
+
if value not in seen:
|
|
138
|
+
ordered.append(value)
|
|
139
|
+
seen.add(value)
|
|
140
|
+
return tuple(ordered)
|
|
File without changes
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from augint_opencodex.profiles import ResolvedPolicy
|
|
8
|
+
|
|
9
|
+
# Generated instruction file consumed by both Codex and OpenCode.
INSTRUCTION_FILE = Path(".ai-opencodex.md")
# Previous instruction filename; listed as managed (but never rendered) so
# the sync engine removes a leftover copy.
LEGACY_INSTRUCTION_FILE = Path(".ai-codex.md")

# Every path this tool owns, relative to the project root. Managed files on
# disk that are not part of the current render are treated as stale.
MANAGED_FILE_PATHS = (
    Path(".codex/config.toml"),
    INSTRUCTION_FILE,
    Path("opencode.json"),
    Path(".agents/skills/README.md"),
    Path(".agents/skills/org-python-tooling/SKILL.md"),
    LEGACY_INSTRUCTION_FILE,
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass(frozen=True)
class RenderedFile:
    """A generated file and the content that should be written to it."""

    # Path relative to the project root.
    path: Path
    # Full text content to write, including the trailing newline.
    content: str
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def render_project(policy: ResolvedPolicy) -> tuple[RenderedFile, ...]:
    """Render all files for the first working slice."""

    # Always-present artifacts come first, in a fixed order.
    rendered: list[RenderedFile] = [
        RenderedFile(Path(".codex/config.toml"), _render_codex_config(policy)),
        RenderedFile(INSTRUCTION_FILE, _render_ai_opencodex_markdown(policy)),
        RenderedFile(Path(".agents/skills/README.md"), _render_skills_readme()),
    ]

    # Optional artifacts are gated by the resolved policy flags.
    if policy.opencode_enabled:
        opencode_file = RenderedFile(Path("opencode.json"), _render_opencode_json(policy))
        rendered.append(opencode_file)

    if policy.org_python_library:
        skill_path = Path(".agents/skills/org-python-tooling/SKILL.md")
        rendered.append(RenderedFile(skill_path, _render_org_python_tooling_skill()))

    return tuple(rendered)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def managed_file_paths() -> tuple[Path, ...]:
    """Return all paths managed by the first working slice.

    The result includes the legacy instruction file, which is never rendered,
    so the sync engine can clean it up as stale.
    """

    return MANAGED_FILE_PATHS
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _render_codex_config(policy: ResolvedPolicy) -> str:
|
|
60
|
+
return (
|
|
61
|
+
"# Generated by ai-codex from .ai-codex.json.\n"
|
|
62
|
+
"# Do not edit by hand; re-run `ai-codex sync` after manifest changes.\n"
|
|
63
|
+
"# Run `CODEX_HOME=$(pwd)/.codex codex` to use this generated config as Codex home config.\n"
|
|
64
|
+
f'web_search = "{policy.codex_web_search}"\n'
|
|
65
|
+
f'approval_policy = "{policy.codex_approval_policy}"\n'
|
|
66
|
+
f'sandbox_mode = "{policy.codex_sandbox_mode}"\n'
|
|
67
|
+
'model_instructions_file = "../.ai-opencodex.md"\n'
|
|
68
|
+
'project_doc_fallback_filenames = [".ai-opencodex.md"]\n'
|
|
69
|
+
)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _render_ai_opencodex_markdown(policy: ResolvedPolicy) -> str:
    """Render the `.ai-opencodex.md` instruction document for *policy*.

    Sections are emitted in a fixed order: header, working contract, active
    profile, references, content policy, blocked paths, shell guardrails,
    then optional org-tooling and OpenCode-disabled sections.
    """
    lines = [
        "# AI OpenCodex Project Instructions",
        "",
        "This file is generated from `.ai-codex.json` by `ai-codex sync`.",
        "This repository uses `.ai-opencodex.md` as its generated repo-local instructions file.",
        "Avoid a root `AGENTS.md` here because Codex gives `AGENTS.md` precedence over fallback filenames in the same directory.",
        "Edit the manifest and re-run the tool instead of editing this file directly.",
        "",
        "## Working Contract",
    ]
    lines.extend(f"- {bullet}" for bullet in policy.instruction_bullets)
    lines.extend(
        [
            "",
            "## Active Profile",
            f"- Profile: `{policy.profile_name}`",
            f"- Summary: {policy.profile_summary}",
            "",
            "## References",
        ]
    )

    # References section always has at least one bullet.
    if policy.references:
        lines.extend(f"- `{reference}`" for reference in policy.references)
    else:
        lines.append("- No explicit reference repositories were configured.")

    lines.extend(
        [
            "",
            "## Content Policy",
            (
                "- Do not use emojis in documentation or generated text unless explicitly requested."
                if policy.no_emojis
                else "- Emoji use is allowed."
            ),
            (
                "- Do not mention AI in commit messages, code comments, or generated code unless explicitly requested."
                if policy.no_ai_mentions
                else "- AI mentions are allowed when they improve clarity."
            ),
            "",
            "## Blocked Paths",
        ]
    )

    lines.extend(f"- `{blocked_path}`" for blocked_path in policy.blocked_paths)
    lines.extend(
        [
            "- These are instruction-level safeguards here; use stronger runtime controls when a path must be truly inaccessible.",
            "",
            "## Shell Guardrails",
        ]
    )

    # Ask bullets come before deny bullets; a placeholder is used only when
    # both lists are empty.
    if policy.ask_patterns:
        lines.extend(f"- Ask before running `{pattern}`" for pattern in policy.ask_patterns)
    if policy.deny_patterns:
        lines.extend(f"- Deny `{pattern}`" for pattern in policy.deny_patterns)
    if not policy.ask_patterns and not policy.deny_patterns:
        lines.append("- No additional shell guardrails were configured.")

    if policy.org_python_library:
        lines.extend(
            [
                "",
                "## Organizational Python Tooling Standard",
                "- Use a `uv`-first workflow for dependency management, locking, and builds.",
                "- Keep Python packages in `src/` and expose CLIs through `[project.scripts]`.",
                "- Preserve `ruff`, `mypy`, `pytest`, coverage, and `pre-commit` as standard tooling.",
                "- Keep Conventional Commit discipline and semantic-release-compatible versioning in place.",
                "- Maintain code-quality, security, compliance, and build-validation jobs in CI.",
                "- Keep a stable task surface such as `install`, `test`, `format`, `typecheck`, `security`, and `build`.",
            ]
        )

    if not policy.opencode_enabled:
        lines.extend(
            [
                "",
                "## OpenCode",
                "- OpenCode output is disabled by `opencode.enabled = false` in the manifest.",
            ]
        )

    # Trailing empty element gives the file a final newline after the join.
    lines.append("")
    return "\n".join(lines)
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _render_opencode_json(policy: ResolvedPolicy) -> str:
|
|
163
|
+
instructions = [
|
|
164
|
+
"Read .ai-opencodex.md before making changes.",
|
|
165
|
+
"Treat .ai-opencodex.md and .codex/config.toml as the canonical local project policy.",
|
|
166
|
+
"This repository intentionally avoids a root AGENTS.md so Codex and OpenCode follow the same generated instructions artifact.",
|
|
167
|
+
"Use .agents/skills/ when shared reusable skills are available.",
|
|
168
|
+
]
|
|
169
|
+
if policy.blocked_paths:
|
|
170
|
+
instructions.append(
|
|
171
|
+
"Avoid blocked paths called out in .ai-opencodex.md unless the user explicitly changes policy."
|
|
172
|
+
)
|
|
173
|
+
|
|
174
|
+
opencode = {
|
|
175
|
+
"$schema": "https://opencode.ai/config.json",
|
|
176
|
+
"instructions": instructions,
|
|
177
|
+
"permission": {
|
|
178
|
+
"read": "allow",
|
|
179
|
+
"edit": "allow",
|
|
180
|
+
"glob": "allow",
|
|
181
|
+
"grep": "allow",
|
|
182
|
+
"list": "allow",
|
|
183
|
+
"bash": "ask" if policy.profile_name == "gov" else "allow",
|
|
184
|
+
"skill": "allow",
|
|
185
|
+
"webfetch": "allow",
|
|
186
|
+
"websearch": "allow",
|
|
187
|
+
"external_directory": "ask",
|
|
188
|
+
"doom_loop": "ask",
|
|
189
|
+
},
|
|
190
|
+
}
|
|
191
|
+
return json.dumps(opencode, indent=2) + "\n"
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def _render_skills_readme() -> str:
|
|
195
|
+
return (
|
|
196
|
+
"# Shared Skills Directory\n\n"
|
|
197
|
+
"This directory is generated by `ai-codex sync` for reusable repo-local skills.\n"
|
|
198
|
+
"Update `.ai-codex.json` and re-run the tool instead of editing generated files by hand.\n"
|
|
199
|
+
)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _render_org_python_tooling_skill() -> str:
|
|
203
|
+
return (
|
|
204
|
+
"# Org Python Tooling Standard\n\n"
|
|
205
|
+
"Use this skill when working in a repository that follows the organizational Python library or tool standard.\n\n"
|
|
206
|
+
"## Standard\n\n"
|
|
207
|
+
"- Use `uv` for dependency management, locking, testing, and builds.\n"
|
|
208
|
+
"- Keep a `src/` package layout and expose entry points through `[project.scripts]`.\n"
|
|
209
|
+
"- Preserve `ruff`, `mypy`, `pytest`, coverage thresholds, and `pre-commit`.\n"
|
|
210
|
+
"- Maintain Conventional Commit discipline and semantic-release-compatible versioning.\n"
|
|
211
|
+
"- Keep security and compliance checks in CI, including dependency and license scanning.\n"
|
|
212
|
+
"- Prefer a stable Makefile task surface over ad hoc project-specific commands.\n"
|
|
213
|
+
)
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Literal
|
|
6
|
+
|
|
7
|
+
from augint_opencodex.ignore import ExcludeUpdate, ensure_git_exclude
|
|
8
|
+
from augint_opencodex.manifest import load_manifest
|
|
9
|
+
from augint_opencodex.profiles import ResolvedPolicy, resolve_policy
|
|
10
|
+
from augint_opencodex.renderer import managed_file_paths, render_project
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
class SyncReport:
    """The result of resolving and optionally writing generated files."""

    # The requested action: "write" mutates disk, the other modes only inspect.
    mode: Literal["write", "dry-run", "check"]
    # The effective policy the rendered files were produced from.
    policy: ResolvedPolicy
    # Rendered files whose on-disk content differed or was missing.
    changed_files: tuple[Path, ...]
    # Managed files found on disk that are no longer part of the render.
    stale_files: tuple[Path, ...]
    # Files actually written (populated only in "write" mode).
    written_files: tuple[Path, ...]
    # Stale files actually deleted (populated only in "write" mode).
    removed_files: tuple[Path, ...]
    # Rendered files whose on-disk content already matched.
    unchanged_files: tuple[Path, ...]
    # Outcome of the git exclude maintenance step (see ensure_git_exclude).
    exclude_update: ExcludeUpdate

    @property
    def needs_changes(self) -> bool:
        """True when any generated file or the git exclude entry is out of sync."""
        return bool(self.changed_files or self.stale_files) or self.exclude_update.needs_update
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def sync_project(
    project_root: Path,
    manifest_path: Path | None = None,
    *,
    mode: Literal["write", "dry-run", "check"] = "write",
) -> SyncReport:
    """Load the manifest, resolve policy, and render local project files.

    Args:
        project_root: Directory containing the project to sync.
        manifest_path: Optional explicit manifest location; defaults to
            `.ai-codex.json` under the project root.
        mode: "write" applies changes; "dry-run" and "check" only report them.

    Returns:
        A SyncReport describing what changed (or would change).
    """

    resolved_root = project_root.resolve()
    resolved_manifest_path = _resolve_manifest_path(resolved_root, manifest_path)
    manifest = load_manifest(resolved_manifest_path)
    policy = resolve_policy(manifest, resolved_root, resolved_manifest_path)
    rendered_files = render_project(policy)

    changed_files: list[Path] = []
    stale_files: list[Path] = []
    written_files: list[Path] = []
    removed_files: list[Path] = []
    unchanged_files: list[Path] = []

    # Pass 1: compare each rendered file with what is on disk, writing only
    # in "write" mode.
    for rendered_file in rendered_files:
        target_path = resolved_root / rendered_file.path
        if target_path.exists():
            current_content = target_path.read_text(encoding="utf-8")
        else:
            # None never equals rendered content, so a missing file counts as changed.
            current_content = None

        if current_content == rendered_file.content:
            unchanged_files.append(rendered_file.path)
            continue

        changed_files.append(rendered_file.path)
        if mode == "write":
            target_path.parent.mkdir(parents=True, exist_ok=True)
            target_path.write_text(rendered_file.content, encoding="utf-8")
            written_files.append(rendered_file.path)

    # Pass 2: managed files that exist on disk but were not rendered for the
    # current policy are stale and get removed in "write" mode.
    expected_paths = {rendered_file.path for rendered_file in rendered_files}
    for managed_path in managed_file_paths():
        if managed_path in expected_paths:
            continue

        target_path = resolved_root / managed_path
        if not target_path.exists():
            continue

        stale_files.append(managed_path)
        if mode == "write":
            target_path.unlink()
            # Clean up directories left empty by the removal, without
            # touching the project root itself.
            _prune_empty_parent_dirs(target_path.parent, stop_at=resolved_root)
            removed_files.append(managed_path)

    exclude_update = ensure_git_exclude(resolved_root, write=mode == "write")
    return SyncReport(
        mode=mode,
        policy=policy,
        changed_files=tuple(changed_files),
        stale_files=tuple(stale_files),
        written_files=tuple(written_files),
        removed_files=tuple(removed_files),
        unchanged_files=tuple(unchanged_files),
        exclude_update=exclude_update,
    )
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _resolve_manifest_path(project_root: Path, manifest_path: Path | None) -> Path:
|
|
97
|
+
if manifest_path is not None:
|
|
98
|
+
return manifest_path.resolve()
|
|
99
|
+
return (project_root / ".ai-codex.json").resolve()
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def _prune_empty_parent_dirs(path: Path, *, stop_at: Path) -> None:
|
|
103
|
+
current = path
|
|
104
|
+
resolved_stop = stop_at.resolve()
|
|
105
|
+
while current != resolved_stop and current.exists():
|
|
106
|
+
try:
|
|
107
|
+
current.rmdir()
|
|
108
|
+
except OSError:
|
|
109
|
+
break
|
|
110
|
+
current = current.parent
|