llm-ide-rules 0.2.0 (tar.gz)
This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- llm_ide_rules-0.2.0/PKG-INFO +134 -0
- llm_ide_rules-0.2.0/README.md +120 -0
- llm_ide_rules-0.2.0/pyproject.toml +21 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/__init__.py +33 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/__main__.py +6 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/commands/download.py +253 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/commands/explode.py +336 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/commands/implode.py +218 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/constants.py +36 -0
- llm_ide_rules-0.2.0/src/llm_ide_rules/sections.json +18 -0

llm_ide_rules-0.2.0/PKG-INFO
@@ -0,0 +1,134 @@
Metadata-Version: 2.3
Name: llm-ide-rules
Version: 0.2.0
Summary: CLI tool for managing LLM IDE prompts and rules
Keywords: llm,ide,prompts,cursor,copilot
Author: Michael Bianco
Author-email: Michael Bianco <mike@mikebian.co>
Requires-Dist: typer>=0.9.0
Requires-Dist: structlog>=23.2.0
Requires-Dist: requests>=2.25.0
Requires-Python: >=3.9
Project-URL: Repository, https://github.com/iloveitaly/llm-ide-rules
Description-Content-Type: text/markdown

# Copilot & Cursor LLM Instructions

Going to try to centralize all my prompts in a single place and create some scripts to help convert from copilot to cursor, etc.

I don't want to be tied to a specific IDE and it's a pain to have to edit instructions for various languages across a ton of different files.

Additionally, it becomes challenging to copy these prompts into various projects and contribute them back to a single location.

Some of the glob assumptions in this repo are specific to how I've chosen to organize python and typescript [in the python starter template](https://github.com/iloveitaly/python-starter-template) and what tooling (fastapi, etc) that I've chosen to use.

## Installation

You can run the `llm-ide-rules` CLI tool using uvx:

```sh
uvx llm-ide-rules
```

Or install from the repository:

```sh
uv tool install git+https://github.com/iloveitaly/llm-ide-rules.git
```

```sh
git clone https://github.com/iloveitaly/llm-ide-rules.git
cd llm-ide-rules
uv sync
source .venv/bin/activate
```

## Usage

### CLI Commands

The `llm-ide-rules` CLI provides commands to manage LLM IDE prompts and rules:

```sh
# Convert instruction file to separate rule files
uvx llm-ide-rules explode [input_file]

# Bundle rule files back into a single instruction file
uvx llm-ide-rules implode cursor [output_file]    # Bundle Cursor rules
uvx llm-ide-rules implode github [output_file]    # Bundle GitHub/Copilot instructions

# Download instruction files from repositories
uvx llm-ide-rules download [instruction_types]    # Download everything by default
uvx llm-ide-rules download cursor github          # Download specific types
uvx llm-ide-rules download --repo other/repo      # Download from different repo


```

### Examples

```sh
# Explode instructions.md into .cursor/rules/ and .github/instructions/
uvx llm-ide-rules explode instructions.md

# Bundle Cursor rules back into a single file
uvx llm-ide-rules implode cursor bundled-instructions.md

# Bundle GitHub instructions with verbose logging
uvx llm-ide-rules implode github --verbose instructions.md

# Download everything from default repository
uvx llm-ide-rules download

# Download only specific instruction types
uvx llm-ide-rules download cursor github

# Download from a different repository
uvx llm-ide-rules download --repo other-user/other-repo --target ./my-project
```

## Development

### Using the CLI for Development

The CLI replaces the old standalone scripts. Use the CLI commands in your development workflow:

```shell
# Setup the environment
uv sync

# Explode instructions into separate rule files
uvx llm-ide-rules explode

# Bundle rules back into instructions
uvx llm-ide-rules implode cursor instructions.md
```

### Building and Testing

```shell
# Build the package
uv build

# Run tests
pytest
```

## Extracting Changes

The idea of this repo is you'll copy prompts into your various projects. Then, if you improve a prompt in a project, you can pull that change into this upstream repo.

Here's how to do it:

```shell
git diff .github/instructions | pbcopy
pbpaste | gpatch -p1
```

`gpatch` is an updated version of patch on macOS that seems to work much better for me.

## Related Links

* https://cursor.directory/rules
* https://github.com/PatrickJS/awesome-cursorrules
* https://www.cursorprompts.org

llm_ide_rules-0.2.0/README.md
@@ -0,0 +1,120 @@
# Copilot & Cursor LLM Instructions

Going to try to centralize all my prompts in a single place and create some scripts to help convert from copilot to cursor, etc.

I don't want to be tied to a specific IDE and it's a pain to have to edit instructions for various languages across a ton of different files.

Additionally, it becomes challenging to copy these prompts into various projects and contribute them back to a single location.

Some of the glob assumptions in this repo are specific to how I've chosen to organize python and typescript [in the python starter template](https://github.com/iloveitaly/python-starter-template) and what tooling (fastapi, etc) that I've chosen to use.

## Installation

You can run the `llm-ide-rules` CLI tool using uvx:

```sh
uvx llm-ide-rules
```

Or install from the repository:

```sh
uv tool install git+https://github.com/iloveitaly/llm-ide-rules.git
```

```sh
git clone https://github.com/iloveitaly/llm-ide-rules.git
cd llm-ide-rules
uv sync
source .venv/bin/activate
```

## Usage

### CLI Commands

The `llm-ide-rules` CLI provides commands to manage LLM IDE prompts and rules:

```sh
# Convert instruction file to separate rule files
uvx llm-ide-rules explode [input_file]

# Bundle rule files back into a single instruction file
uvx llm-ide-rules implode cursor [output_file]    # Bundle Cursor rules
uvx llm-ide-rules implode github [output_file]    # Bundle GitHub/Copilot instructions

# Download instruction files from repositories
uvx llm-ide-rules download [instruction_types]    # Download everything by default
uvx llm-ide-rules download cursor github          # Download specific types
uvx llm-ide-rules download --repo other/repo      # Download from different repo


```

### Examples

```sh
# Explode instructions.md into .cursor/rules/ and .github/instructions/
uvx llm-ide-rules explode instructions.md

# Bundle Cursor rules back into a single file
uvx llm-ide-rules implode cursor bundled-instructions.md

# Bundle GitHub instructions with verbose logging
uvx llm-ide-rules implode github --verbose instructions.md

# Download everything from default repository
uvx llm-ide-rules download

# Download only specific instruction types
uvx llm-ide-rules download cursor github

# Download from a different repository
uvx llm-ide-rules download --repo other-user/other-repo --target ./my-project
```

## Development

### Using the CLI for Development

The CLI replaces the old standalone scripts. Use the CLI commands in your development workflow:

```shell
# Setup the environment
uv sync

# Explode instructions into separate rule files
uvx llm-ide-rules explode

# Bundle rules back into instructions
uvx llm-ide-rules implode cursor instructions.md
```

### Building and Testing

```shell
# Build the package
uv build

# Run tests
pytest
```

## Extracting Changes

The idea of this repo is you'll copy prompts into your various projects. Then, if you improve a prompt in a project, you can pull that change into this upstream repo.

Here's how to do it:

```shell
git diff .github/instructions | pbcopy
pbpaste | gpatch -p1
```

`gpatch` is an updated version of patch on macOS that seems to work much better for me.

## Related Links

* https://cursor.directory/rules
* https://github.com/PatrickJS/awesome-cursorrules
* https://www.cursorprompts.org

llm_ide_rules-0.2.0/pyproject.toml
@@ -0,0 +1,21 @@
[project]
name = "llm-ide-rules"
version = "0.2.0"
description = "CLI tool for managing LLM IDE prompts and rules"
keywords = ["llm", "ide", "prompts", "cursor", "copilot"]
readme = "README.md"
requires-python = ">=3.9"
dependencies = ["typer>=0.9.0", "structlog>=23.2.0", "requests>=2.25.0"]
authors = [{ name = "Michael Bianco", email = "mike@mikebian.co" }]
urls = { "Repository" = "https://github.com/iloveitaly/llm-ide-rules" }

# additional packaging information: https://packaging.python.org/en/latest/specifications/core-metadata/#license
[project.scripts]
llm_ide_rules = "llm_ide_rules:main"

[build-system]
requires = ["uv_build>=0.8.11,<0.9.0"]
build-backend = "uv_build"

[dependency-groups]
dev = ["pytest>=8.3.3"]

llm_ide_rules-0.2.0/src/llm_ide_rules/__init__.py
@@ -0,0 +1,33 @@
"""LLM Rules CLI package for managing IDE prompts and rules."""

import typer
from typing_extensions import Annotated

from llm_ide_rules.commands.explode import explode_main
from llm_ide_rules.commands.implode import cursor, github
from llm_ide_rules.commands.download import download_main

__version__ = "0.2.0"

app = typer.Typer(
    name="llm_ide_rules",
    help="CLI tool for managing LLM IDE prompts and rules",
    no_args_is_help=True,
)

# Add commands directly
app.command("explode", help="Convert instruction file to separate rule files")(explode_main)
app.command("download", help="Download LLM instruction files from GitHub repositories")(download_main)

# Create implode sub-typer
implode_app = typer.Typer(help="Bundle rule files into a single instruction file")
implode_app.command("cursor", help="Bundle Cursor rules into a single file")(cursor)
implode_app.command("github", help="Bundle GitHub/Copilot instructions into a single file")(github)
app.add_typer(implode_app, name="implode")

def main():
    """Main entry point for the CLI."""
    app()

if __name__ == "__main__":
    main()

llm_ide_rules-0.2.0/src/llm_ide_rules/commands/download.py
@@ -0,0 +1,253 @@
"""Download command: Download LLM instruction files from GitHub repositories."""

import logging
import tempfile
import zipfile
from pathlib import Path
from typing import List

import requests
import structlog
import typer
from typing_extensions import Annotated

logger = structlog.get_logger()

DEFAULT_REPO = "iloveitaly/llm_ide_rules"
DEFAULT_BRANCH = "master"

# Define what files/directories each instruction type includes
INSTRUCTION_TYPES = {
    "cursor": {"directories": [".cursor"], "files": []},
    "github": {
        "directories": [".github"],
        "files": [],
        "exclude_patterns": ["workflows/*"],
    },
    "gemini": {"directories": [], "files": ["GEMINI.md"]},
    "claude": {"directories": [], "files": ["CLAUDE.md"]},
    "agent": {"directories": [], "files": ["AGENT.md"]},
}

# Default types to download when no specific types are specified
DEFAULT_TYPES = list(INSTRUCTION_TYPES.keys())


def download_and_extract_repo(repo: str, branch: str = DEFAULT_BRANCH) -> Path:
    """Download a GitHub repository as a ZIP and extract it to a temporary directory."""
    zip_url = f"https://github.com/{repo}/archive/{branch}.zip"

    logger.info("Downloading repository", repo=repo, branch=branch, url=zip_url)

    try:
        response = requests.get(zip_url, timeout=30)
        response.raise_for_status()
    except requests.RequestException as e:
        logger.error("Failed to download repository", error=str(e), url=zip_url)
        raise typer.Exit(1)

    # Create temporary directory and file
    temp_dir = Path(tempfile.mkdtemp())
    zip_path = temp_dir / "repo.zip"

    # Write ZIP content
    zip_path.write_bytes(response.content)

    # Extract ZIP
    extract_dir = temp_dir / "extracted"
    extract_dir.mkdir()

    with zipfile.ZipFile(zip_path, "r") as zip_ref:
        zip_ref.extractall(extract_dir)

    # Find the extracted repository directory (should be the only directory)
    repo_dirs = [d for d in extract_dir.iterdir() if d.is_dir()]
    if not repo_dirs:
        logger.error("No directories found in extracted ZIP")
        raise typer.Exit(1)

    repo_dir = repo_dirs[0]
    logger.info("Repository extracted", path=str(repo_dir))

    return repo_dir


def copy_instruction_files(
    repo_dir: Path, instruction_types: List[str], target_dir: Path
):
    """Copy instruction files from the repository to the target directory."""
    copied_items = []

    for inst_type in instruction_types:
        if inst_type not in INSTRUCTION_TYPES:
            logger.warning("Unknown instruction type", type=inst_type)
            continue

        config = INSTRUCTION_TYPES[inst_type]

        # Copy directories
        for dir_name in config["directories"]:
            source_dir = repo_dir / dir_name
            target_subdir = target_dir / dir_name

            if source_dir.exists():
                logger.info(
                    "Copying directory",
                    source=str(source_dir),
                    target=str(target_subdir),
                )

                # Create target directory
                target_subdir.mkdir(parents=True, exist_ok=True)

                # Copy all files from source to target
                copy_directory_contents(
                    source_dir, target_subdir, config.get("exclude_patterns", [])
                )
                copied_items.append(f"{dir_name}/")

        # Copy individual files
        for file_name in config["files"]:
            source_file = repo_dir / file_name
            target_file = target_dir / file_name

            if source_file.exists():
                logger.info(
                    "Copying file", source=str(source_file), target=str(target_file)
                )

                # Create parent directories if needed
                target_file.parent.mkdir(parents=True, exist_ok=True)

                # Copy file
                target_file.write_bytes(source_file.read_bytes())
                copied_items.append(file_name)

    return copied_items


def copy_directory_contents(
    source_dir: Path, target_dir: Path, exclude_patterns: List[str]
):
    """Recursively copy directory contents, excluding specified patterns."""
    for item in source_dir.rglob("*"):
        if item.is_file():
            relative_path = item.relative_to(source_dir)
            relative_str = str(relative_path)

            # Check if file matches any exclude pattern
            should_exclude = False
            for pattern in exclude_patterns:
                if pattern.endswith("/*"):
                    # Pattern like "workflows/*" - exclude if path starts with "workflows/"
                    pattern_prefix = pattern[:-1]  # Remove the "*"
                    if relative_str.startswith(pattern_prefix):
                        should_exclude = True
                        break
                elif relative_str == pattern:
                    should_exclude = True
                    break

            if should_exclude:
                logger.debug("Excluding file", file=relative_str, pattern=pattern)
                continue

            target_file = target_dir / relative_path
            target_file.parent.mkdir(parents=True, exist_ok=True)
            target_file.write_bytes(item.read_bytes())


def download_main(
    instruction_types: Annotated[
        List[str],
        typer.Argument(
            help="Types of instructions to download (cursor, github, gemini, claude, agent). Downloads everything by default."
        ),
    ] = None,
    repo: Annotated[
        str, typer.Option("--repo", "-r", help="GitHub repository to download from")
    ] = DEFAULT_REPO,
    branch: Annotated[
        str, typer.Option("--branch", "-b", help="Branch to download from")
    ] = DEFAULT_BRANCH,
    target_dir: Annotated[
        str, typer.Option("--target", "-t", help="Target directory to download to")
    ] = ".",
    verbose: Annotated[
        bool, typer.Option("--verbose", "-v", help="Enable verbose logging")
    ] = False,
):
    """Download LLM instruction files from GitHub repositories.

    This command replaces the legacy download.sh script and provides more flexibility
    in selecting what to download and from which repository.

    Examples:

    \b
    # Download everything from the default repository
    llm_ide_rules download

    \b
    # Download only Cursor and GitHub instructions
    llm_ide_rules download cursor github

    \b
    # Download from a different repository
    llm_ide_rules download --repo other-user/other-repo

    \b
    # Download to a specific directory
    llm_ide_rules download --target ./my-project
    """
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
        structlog.configure(
            wrapper_class=structlog.make_filtering_bound_logger(logging.DEBUG),
        )

    # Use default types if none specified
    if not instruction_types:
        instruction_types = DEFAULT_TYPES

    # Validate instruction types
    invalid_types = [t for t in instruction_types if t not in INSTRUCTION_TYPES]
    if invalid_types:
        logger.error(
            "Invalid instruction types",
            invalid_types=invalid_types,
            valid_types=list(INSTRUCTION_TYPES.keys()),
        )
        raise typer.Exit(1)

    target_path = Path(target_dir).resolve()

    logger.info(
        "Starting download",
        repo=repo,
        branch=branch,
        instruction_types=instruction_types,
        target_dir=str(target_path),
    )

    # Download and extract repository
    repo_dir = download_and_extract_repo(repo, branch)

    try:
        # Copy instruction files
        copied_items = copy_instruction_files(repo_dir, instruction_types, target_path)

        if copied_items:
            logger.info("Download completed successfully", copied_items=copied_items)
            typer.echo(f"Downloaded {len(copied_items)} items to {target_path}:")
            for item in copied_items:
                typer.echo(f"  - {item}")
        else:
            logger.warning("No files were copied")
            typer.echo("No matching instruction files found in the repository.")

    finally:
        # Clean up temporary directory
        import shutil

        shutil.rmtree(repo_dir.parent.parent, ignore_errors=True)

llm_ide_rules-0.2.0/src/llm_ide_rules/commands/explode.py
@@ -0,0 +1,336 @@
"""Explode command: Convert instruction file to separate rule files."""

import os
import sys
from pathlib import Path
from typing_extensions import Annotated

import typer
import structlog
import logging

from llm_ide_rules.constants import load_section_globs, header_to_filename

logger = structlog.get_logger()


def generate_cursor_frontmatter(glob):
    """Generate Cursor rule frontmatter for a given glob pattern."""
    return f"""---
description:
globs: {glob}
alwaysApply: false
---
"""


def generate_copilot_frontmatter(glob):
    """Generate Copilot instruction frontmatter for a given glob pattern."""
    return f"""---
applyTo: "{glob}"
---
"""


def extract_general(lines):
    """
    Extract lines before the first section header '## '.
    """
    general = []
    for line in lines:
        if line.startswith("## "):
            break
        general.append(line)
    return general


def extract_section(lines, header):
    """
    Extract lines under a given section header until the next header or EOF.
    Includes the header itself in the output.
    """
    content = []
    in_section = False
    for line in lines:
        if in_section:
            if line.startswith("## "):
                break
            content.append(line)
        elif line.strip().lower() == header.lower():
            in_section = True
            content.append(line)  # Include the header itself
    return content


def write_rule(path, header_yaml, content_lines):
    """
    Write a rule file with front matter and content.
    """
    trimmed_content = trim_content(content_lines)
    with open(path, "w") as f:
        f.write(header_yaml.strip() + "\n")
        for line in trimmed_content:
            f.write(line)


def trim_content(content_lines):
    """Remove leading and trailing empty lines from content."""
    # Find first non-empty line
    start = 0
    for i, line in enumerate(content_lines):
        if line.strip():
            start = i
            break
    else:
        # All lines are empty
        return []

    # Find last non-empty line
    end = len(content_lines)
    for i in range(len(content_lines) - 1, -1, -1):
        if content_lines[i].strip():
            end = i + 1
            break

    return content_lines[start:end]


def replace_header_with_proper_casing(content_lines, proper_header):
    """Replace the first header in content with the properly cased version."""
    if not content_lines:
        return content_lines

    # Find and replace the first header line
    for i, line in enumerate(content_lines):
        if line.startswith("## "):
            content_lines[i] = f"## {proper_header}\n"
            break

    return content_lines


def extract_description_and_filter_content(content_lines, default_description):
    """Extract description from first non-empty line that starts with 'Description:' and return filtered content."""
    trimmed_content = trim_content(content_lines)
    description = ""
    description_line = None

    # Find the first non-empty, non-header line that starts with "Description:"
    for i, line in enumerate(trimmed_content):
        stripped_line = line.strip()
        if (
            stripped_line
            and not stripped_line.startswith("#")
            and not stripped_line.startswith("##")
        ):
            if stripped_line.startswith("Description:"):
                # Extract the description text after "Description:"
                description = stripped_line[len("Description:") :].strip()
                description_line = i
                break
            else:
                # Found a non-header line that doesn't start with Description:, stop looking
                break

    # Only use explicit descriptions - no fallback extraction
    if description and description_line is not None:
        # Remove the description line from content
        filtered_content = (
            trimmed_content[:description_line] + trimmed_content[description_line + 1 :]
        )
        # Trim again after removing description line
        filtered_content = trim_content(filtered_content)
    else:
        # No description found, keep all content
        filtered_content = trimmed_content

    return description, filtered_content


def write_cursor_prompt(content_lines, filename, prompts_dir, section_name=None):
    """Write a Cursor prompt file with frontmatter including description."""
    filepath = os.path.join(prompts_dir, filename + ".mdc")

    # Don't generate a default description, leave empty if none found
    default_description = ""
    description, filtered_content = extract_description_and_filter_content(
        content_lines, default_description
    )

    with open(filepath, "w") as f:
        # Only add frontmatter if description is not empty
        if description:
            frontmatter = f"""---
description: {description}
---
"""
            f.write(frontmatter)

        for line in filtered_content:
            f.write(line)


def write_github_prompt(content_lines, filename, prompts_dir, section_name=None):
    """Write a GitHub prompt file with proper frontmatter."""
    filepath = os.path.join(prompts_dir, filename + ".prompt.md")

    # Don't generate a default description, leave empty if none found
    default_description = ""
    description, filtered_content = extract_description_and_filter_content(
        content_lines, default_description
    )

    frontmatter = f"""---
mode: 'agent'
description: '{description}'
---
"""

    with open(filepath, "w") as f:
        f.write(frontmatter)
        for line in filtered_content:
            f.write(line)


def process_unmapped_section(lines, section_name, rules_dir, github_prompts_dir):
    """Process an unmapped section as a manually applied rule (prompt)."""
    section_content = extract_section(lines, f"## {section_name}")
    if any(line.strip() for line in section_content):
        filename = header_to_filename(section_name)

        # Replace header with proper casing
        section_content = replace_header_with_proper_casing(
            section_content, section_name
        )

        # Create prompt files (same as None case in SECTION_GLOBS)
        write_cursor_prompt(section_content, filename, rules_dir, section_name)
        write_github_prompt(section_content, filename, github_prompts_dir, section_name)
        return True
    return False


def explode_main(
    input_file: Annotated[
        str, typer.Argument(help="Input markdown file")
    ] = "instructions.md",
    verbose: Annotated[
        bool, typer.Option("--verbose", "-v", help="Enable verbose logging")
    ] = False,
    config: Annotated[
        str, typer.Option("--config", "-c", help="Custom configuration file path")
    ] = None,
):
    """Convert instruction file to separate rule files."""
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
        structlog.configure(
            wrapper_class=structlog.make_filtering_bound_logger(logging.DEBUG),
        )

    # Load section globs (with optional custom config)
    SECTION_GLOBS = load_section_globs(config)

    logger.info("Starting explode operation", input_file=input_file, config=config)

    # Work in current directory ($PWD)
    rules_dir = os.path.join(os.getcwd(), ".cursor", "rules")
    copilot_dir = os.path.join(os.getcwd(), ".github", "instructions")
    github_prompts_dir = os.path.join(os.getcwd(), ".github", "prompts")

    os.makedirs(rules_dir, exist_ok=True)
    os.makedirs(copilot_dir, exist_ok=True)
    os.makedirs(github_prompts_dir, exist_ok=True)

    input_path = os.path.join(os.getcwd(), input_file)

    try:
        with open(input_path, "r") as f:
            lines = f.readlines()
    except FileNotFoundError:
        logger.error("Input file not found", input_file=input_path)
        raise typer.Exit(1)

    # General instructions
    general = extract_general(lines)
    if any(line.strip() for line in general):
        general_header = """
---
description:
alwaysApply: true
---
"""
        write_rule(os.path.join(rules_dir, "general.mdc"), general_header, general)
        # Copilot general instructions (no frontmatter)
        write_rule(
            os.path.join(os.getcwd(), ".github", "copilot-instructions.md"), "", general
        )

    # Process each section dynamically
    found_sections = set()
    for section_name, glob_or_description in SECTION_GLOBS.items():
        section_content = extract_section(lines, f"## {section_name}")
        if any(line.strip() for line in section_content):
            found_sections.add(section_name)
            filename = header_to_filename(section_name)

            # Replace header with proper casing from SECTION_GLOBS
            section_content = replace_header_with_proper_casing(
                section_content, section_name
            )

            if glob_or_description is not None:
                # It's a glob pattern - create instruction files
                cursor_header = generate_cursor_frontmatter(glob_or_description)
                write_rule(
                    os.path.join(rules_dir, filename + ".mdc"),
                    cursor_header,
                    section_content,
                )

                copilot_header = generate_copilot_frontmatter(glob_or_description)
                write_rule(
                    os.path.join(copilot_dir, filename + ".instructions.md"),
                    copilot_header,
                    section_content,
                )
            else:
                # It's a prompt - create prompt files using the original section name for header
                write_cursor_prompt(section_content, filename, rules_dir, section_name)
                write_github_prompt(
                    section_content, filename, github_prompts_dir, section_name
                )

    # Check for sections in mapping that don't exist in the file
    for section_name in SECTION_GLOBS:
        if section_name not in found_sections:
            logger.warning("Section not found in file", section=section_name)

    # Process unmapped sections as manually applied rules (prompts)
    processed_unmapped = set()
    for line in lines:
        if line.startswith("## "):
            section_header = line.strip()
            section_name = section_header[3:]  # Remove "## "
            # Case insensitive check and avoid duplicate processing
            if (
                not any(
                    section_name.lower() == mapped_section.lower()
                    for mapped_section in SECTION_GLOBS
                )
                and section_name not in processed_unmapped
            ):
                if process_unmapped_section(
                    lines, section_name, rules_dir, github_prompts_dir
                ):
                    processed_unmapped.add(section_name)

    logger.info(
        "Explode operation completed",
        cursor_rules=rules_dir,
        copilot_instructions=copilot_dir,
        github_prompts=github_prompts_dir,
    )
    typer.echo(
        "Created Cursor rules in .cursor/rules/, Copilot instructions in .github/instructions/, and prompts in respective directories"
    )

llm_ide_rules-0.2.0/src/llm_ide_rules/commands/implode.py
@@ -0,0 +1,218 @@
"""Implode command: Bundle rule files into a single instruction file."""

import os
from pathlib import Path
from typing_extensions import Annotated
import logging

import typer
import structlog

from llm_ide_rules.constants import load_section_globs, header_to_filename, filename_to_header

logger = structlog.get_logger()


def get_ordered_files(file_list, section_globs_keys):
    """Order files based on SECTION_GLOBS key order, with unmapped files at the end."""
    file_dict = {f.stem: f for f in file_list}
    ordered_files = []

    # Add files in SECTION_GLOBS order
    for section_name in section_globs_keys:
        filename = header_to_filename(section_name)
        if filename in file_dict:
            ordered_files.append(file_dict[filename])
            del file_dict[filename]

    # Add any remaining files (not in SECTION_GLOBS) sorted alphabetically
    remaining_files = sorted(file_dict.values(), key=lambda p: p.name)
    ordered_files.extend(remaining_files)

    return ordered_files


def get_ordered_files_github(file_list, section_globs_keys):
    """Order GitHub instruction files based on SECTION_GLOBS key order, with unmapped files at the end.
    Handles .instructions suffix by stripping it for ordering purposes."""
    # Create dict mapping base filename (without .instructions) to the actual file
    file_dict = {}
    for f in file_list:
        base_stem = f.stem.replace(".instructions", "")
        file_dict[base_stem] = f

    ordered_files = []

    # Add files in SECTION_GLOBS order
    for section_name in section_globs_keys:
        filename = header_to_filename(section_name)
        if filename in file_dict:
            ordered_files.append(file_dict[filename])
            del file_dict[filename]

    # Add any remaining files (not in SECTION_GLOBS) sorted alphabetically
    remaining_files = sorted(file_dict.values(), key=lambda p: p.name)
    ordered_files.extend(remaining_files)

    return ordered_files


def bundle_cursor_rules(rules_dir, output_file, section_globs):
    """Bundle Cursor rule files into a single file."""
    rule_files = list(Path(rules_dir).glob("*.mdc"))
    general = [f for f in rule_files if f.stem == "general"]
    others = [f for f in rule_files if f.stem != "general"]

    # Order the non-general files based on section_globs
    ordered_others = get_ordered_files(others, section_globs.keys())
    ordered = general + ordered_others

    def resolve_header_from_stem(stem):
        """Return the canonical header for a given filename stem.

        Prefer exact header names from section_globs (preserves acronyms like FastAPI, TypeScript).
        Fallback to title-casing the filename when not found in section_globs.
        """
        for section_name in section_globs.keys():
            if header_to_filename(section_name) == stem:
                return section_name
        return filename_to_header(stem)

    with open(output_file, "w") as out:
        for rule_file in ordered:
            with open(rule_file, "r") as f:
                content = f.read().strip()
            if not content:
                continue
            content = strip_yaml_frontmatter(content)
            content = strip_header(content)
            # Use canonical header names from SECTION_GLOBS when available
            header = resolve_header_from_stem(rule_file.stem)
            if rule_file.stem != "general":
                out.write(f"## {header}\n\n")
            out.write(content)
            out.write("\n\n")


def strip_yaml_frontmatter(text):
    """Strip YAML frontmatter from text."""
    lines = text.splitlines()
    if lines and lines[0].strip() == "---":
        # Find the next '---' after the first
        for i in range(1, len(lines)):
            if lines[i].strip() == "---":
                return "\n".join(lines[i + 1 :]).lstrip("\n")
    return text


def strip_header(text):
    """Remove the first markdown header (## Header) from text if present."""
    lines = text.splitlines()
    if lines and lines[0].startswith("## "):
        # Remove the header line and any immediately following empty lines
        remaining_lines = lines[1:]
        while remaining_lines and not remaining_lines[0].strip():
            remaining_lines = remaining_lines[1:]
        return "\n".join(remaining_lines)
    return text


def bundle_github_instructions(instructions_dir, output_file, section_globs):
    """Bundle GitHub instruction files into a single file."""
    copilot_general = Path(os.getcwd()) / ".github" / "copilot-instructions.md"
    instr_files = list(Path(instructions_dir).glob("*.instructions.md"))

    # Order the instruction files based on section_globs
    # We need to create a modified version that strips .instructions from stems for ordering
    ordered_files = get_ordered_files_github(instr_files, section_globs.keys())

    def resolve_header_from_stem(stem):
        """Return the canonical header for a given filename stem.

        Prefer exact header names from section_globs (preserves acronyms like FastAPI, TypeScript).
        Fallback to title-casing the filename when not found in section_globs.
        """
        for section_name in section_globs.keys():
            if header_to_filename(section_name) == stem:
                return section_name
        return filename_to_header(stem)

    with open(output_file, "w") as out:
        # Write general copilot instructions if present
        if copilot_general.exists():
            content = copilot_general.read_text().strip()
            if content:
                out.write(content)
                out.write("\n\n")
        for instr_file in ordered_files:
            with open(instr_file, "r") as f:
                content = f.read().strip()
            if not content:
                continue
            content = strip_yaml_frontmatter(content)
            content = strip_header(content)
            # Use canonical header names from SECTION_GLOBS when available
            base_stem = instr_file.stem.replace(".instructions", "")
            header = resolve_header_from_stem(base_stem)
            out.write(f"## {header}\n\n")
            out.write(content)
            out.write("\n\n")


def cursor(
    output: Annotated[str, typer.Argument(help="Output file")] = "instructions.md",
    verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Enable verbose logging")] = False,
    config: Annotated[str, typer.Option("--config", "-c", help="Custom configuration file path")] = None,
):
    """Bundle Cursor rules into a single file."""
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
        structlog.configure(
            wrapper_class=structlog.make_filtering_bound_logger(logging.DEBUG),
        )

    # Load section globs (with optional custom config)
    SECTION_GLOBS = load_section_globs(config)

    rules_dir = os.path.join(os.getcwd(), ".cursor", "rules")
    output_path = os.path.join(os.getcwd(), output)

    logger.info("Bundling Cursor rules", rules_dir=rules_dir, output_file=output_path, config=config)

    if not Path(rules_dir).exists():
        logger.error("Cursor rules directory not found", rules_dir=rules_dir)
        raise typer.Exit(1)

    bundle_cursor_rules(rules_dir, output_path, SECTION_GLOBS)
    logger.info("Cursor rules bundled successfully", output_file=output_path)
    typer.echo(f"Bundled cursor rules into {output}")


def github(
    output: Annotated[str, typer.Argument(help="Output file")] = "instructions.md",
    verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Enable verbose logging")] = False,
    config: Annotated[str, typer.Option("--config", "-c", help="Custom configuration file path")] = None,
):
    """Bundle GitHub/Copilot instructions into a single file."""
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
        structlog.configure(
            wrapper_class=structlog.make_filtering_bound_logger(logging.DEBUG),
        )

    # Load section globs (with optional custom config)
    SECTION_GLOBS = load_section_globs(config)

    instructions_dir = os.path.join(os.getcwd(), ".github", "instructions")
    output_path = os.path.join(os.getcwd(), output)

    logger.info("Bundling GitHub instructions", instructions_dir=instructions_dir, output_file=output_path, config=config)

    if not Path(instructions_dir).exists():
        logger.error("GitHub instructions directory not found", instructions_dir=instructions_dir)
        raise typer.Exit(1)

    bundle_github_instructions(instructions_dir, output_path, SECTION_GLOBS)
    logger.info("GitHub instructions bundled successfully", output_file=output_path)
    typer.echo(f"Bundled github instructions into {output}")

llm_ide_rules-0.2.0/src/llm_ide_rules/constants.py
@@ -0,0 +1,36 @@
"""Shared constants for explode and implode functionality."""

import json
import os
from pathlib import Path

def load_section_globs(custom_config_path: str = None) -> dict:
    """Load section globs from JSON config file.

    Args:
        custom_config_path: Path to custom configuration file to override defaults

    Returns:
        Dictionary mapping section headers to their file globs or None for prompts
    """
    if custom_config_path and os.path.exists(custom_config_path):
        config_path = Path(custom_config_path)
    else:
        # Load default bundled config
        config_path = Path(__file__).parent / "sections.json"

    with open(config_path, 'r') as f:
        config = json.load(f)

    return config["section_globs"]

# Default section globs - loaded from bundled JSON
SECTION_GLOBS = load_section_globs()

def header_to_filename(header):
    """Convert a section header to a filename."""
    return header.lower().replace(' ', '-')

def filename_to_header(filename):
    """Convert a filename back to a section header."""
    return filename.replace('-', ' ').title()

llm_ide_rules-0.2.0/src/llm_ide_rules/sections.json
@@ -0,0 +1,18 @@
{
  "section_globs": {
    "Python": "**/*.py",
    "Python App": "**/*.py",
    "Pytest Integration Tests": "tests/integration/**/*.py",
    "Pytest Tests": "tests/**/*.py",
    "Python Route Tests": "tests/routes/**/*.py",
    "Alembic Migrations": "migrations/versions/*.py",
    "FastAPI": "app/routes/**/*.py",
    "React": "**/*.tsx",
    "React Router": "web/app/routes/**/*.tsx",
    "React Router Client Loader": null,
    "Shell": "**/*.sh",
    "TypeScript": "**/*.ts,**/*.tsx",
    "TypeScript DocString": null,
    "Secrets": null
  }
}
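
For readers evaluating the new `--config` option: the explode and implode commands load their section mapping with `load_section_globs`, which reads a JSON file shaped like the bundled `sections.json` above (a `section_globs` object mapping section headers to a glob, or null for manually applied prompts). The sketch below is not part of the released package; the file name and the "Commit Messages" section are hypothetical, shown only to illustrate the expected shape.

```python
# Minimal sketch, assuming the sections.json shape shown above.
import json
from pathlib import Path

custom_config = {
    "section_globs": {
        "Python": "**/*.py",        # glob -> instruction/rule files scoped to matching paths
        "Commit Messages": None,    # None/null -> manually applied prompt files (hypothetical section)
    }
}

# Write a custom config next to your instructions file.
Path("my-sections.json").write_text(json.dumps(custom_config, indent=2))

# Then, following the README's CLI usage:
#   uvx llm-ide-rules explode instructions.md --config my-sections.json
#   uvx llm-ide-rules implode cursor instructions.md --config my-sections.json
```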