chatmcp-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aider/__init__.py +20 -0
- aider/__main__.py +4 -0
- aider/_version.py +21 -0
- aider/analytics.py +250 -0
- aider/args.py +926 -0
- aider/args_formatter.py +228 -0
- aider/coders/__init__.py +34 -0
- aider/coders/architect_coder.py +48 -0
- aider/coders/architect_prompts.py +40 -0
- aider/coders/ask_coder.py +9 -0
- aider/coders/ask_prompts.py +35 -0
- aider/coders/base_coder.py +2483 -0
- aider/coders/base_prompts.py +60 -0
- aider/coders/chat_chunks.py +64 -0
- aider/coders/context_coder.py +53 -0
- aider/coders/context_prompts.py +75 -0
- aider/coders/editblock_coder.py +657 -0
- aider/coders/editblock_fenced_coder.py +10 -0
- aider/coders/editblock_fenced_prompts.py +143 -0
- aider/coders/editblock_func_coder.py +141 -0
- aider/coders/editblock_func_prompts.py +27 -0
- aider/coders/editblock_prompts.py +174 -0
- aider/coders/editor_diff_fenced_coder.py +9 -0
- aider/coders/editor_diff_fenced_prompts.py +11 -0
- aider/coders/editor_editblock_coder.py +8 -0
- aider/coders/editor_editblock_prompts.py +18 -0
- aider/coders/editor_whole_coder.py +8 -0
- aider/coders/editor_whole_prompts.py +10 -0
- aider/coders/help_coder.py +16 -0
- aider/coders/help_prompts.py +46 -0
- aider/coders/patch_coder.py +706 -0
- aider/coders/patch_prompts.py +161 -0
- aider/coders/search_replace.py +757 -0
- aider/coders/shell.py +37 -0
- aider/coders/single_wholefile_func_coder.py +102 -0
- aider/coders/single_wholefile_func_prompts.py +27 -0
- aider/coders/udiff_coder.py +429 -0
- aider/coders/udiff_prompts.py +115 -0
- aider/coders/udiff_simple.py +14 -0
- aider/coders/udiff_simple_prompts.py +25 -0
- aider/coders/wholefile_coder.py +144 -0
- aider/coders/wholefile_func_coder.py +134 -0
- aider/coders/wholefile_func_prompts.py +27 -0
- aider/coders/wholefile_prompts.py +67 -0
- aider/commands.py +1665 -0
- aider/copypaste.py +72 -0
- aider/deprecated.py +126 -0
- aider/diffs.py +128 -0
- aider/dump.py +29 -0
- aider/editor.py +147 -0
- aider/exceptions.py +107 -0
- aider/format_settings.py +26 -0
- aider/gui.py +545 -0
- aider/help.py +163 -0
- aider/help_pats.py +19 -0
- aider/history.py +143 -0
- aider/io.py +1175 -0
- aider/linter.py +304 -0
- aider/llm.py +47 -0
- aider/main.py +1267 -0
- aider/mdstream.py +243 -0
- aider/models.py +1286 -0
- aider/onboarding.py +428 -0
- aider/openrouter.py +128 -0
- aider/prompts.py +64 -0
- aider/queries/tree-sitter-language-pack/README.md +7 -0
- aider/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/c-tags.scm +9 -0
- aider/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
- aider/queries/tree-sitter-language-pack/commonlisp-tags.scm +122 -0
- aider/queries/tree-sitter-language-pack/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-language-pack/csharp-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/d-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/dart-tags.scm +92 -0
- aider/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-language-pack/elm-tags.scm +19 -0
- aider/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
- aider/queries/tree-sitter-language-pack/go-tags.scm +42 -0
- aider/queries/tree-sitter-language-pack/java-tags.scm +20 -0
- aider/queries/tree-sitter-language-pack/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-language-pack/lua-tags.scm +34 -0
- aider/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
- aider/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/python-tags.scm +14 -0
- aider/queries/tree-sitter-language-pack/r-tags.scm +21 -0
- aider/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
- aider/queries/tree-sitter-language-pack/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-language-pack/rust-tags.scm +60 -0
- aider/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
- aider/queries/tree-sitter-language-pack/swift-tags.scm +51 -0
- aider/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
- aider/queries/tree-sitter-languages/README.md +23 -0
- aider/queries/tree-sitter-languages/c-tags.scm +9 -0
- aider/queries/tree-sitter-languages/c_sharp-tags.scm +46 -0
- aider/queries/tree-sitter-languages/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-languages/dart-tags.scm +91 -0
- aider/queries/tree-sitter-languages/elisp-tags.scm +8 -0
- aider/queries/tree-sitter-languages/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-languages/elm-tags.scm +19 -0
- aider/queries/tree-sitter-languages/go-tags.scm +30 -0
- aider/queries/tree-sitter-languages/hcl-tags.scm +77 -0
- aider/queries/tree-sitter-languages/java-tags.scm +20 -0
- aider/queries/tree-sitter-languages/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-languages/kotlin-tags.scm +27 -0
- aider/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-languages/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-languages/php-tags.scm +26 -0
- aider/queries/tree-sitter-languages/python-tags.scm +12 -0
- aider/queries/tree-sitter-languages/ql-tags.scm +26 -0
- aider/queries/tree-sitter-languages/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-languages/rust-tags.scm +60 -0
- aider/queries/tree-sitter-languages/scala-tags.scm +65 -0
- aider/queries/tree-sitter-languages/typescript-tags.scm +41 -0
- aider/reasoning_tags.py +82 -0
- aider/repo.py +623 -0
- aider/repomap.py +847 -0
- aider/report.py +200 -0
- aider/resources/__init__.py +3 -0
- aider/resources/model-metadata.json +468 -0
- aider/resources/model-settings.yml +1767 -0
- aider/run_cmd.py +132 -0
- aider/scrape.py +284 -0
- aider/sendchat.py +61 -0
- aider/special.py +203 -0
- aider/urls.py +17 -0
- aider/utils.py +338 -0
- aider/versioncheck.py +113 -0
- aider/voice.py +187 -0
- aider/waiting.py +221 -0
- aider/watch.py +318 -0
- aider/watch_prompts.py +12 -0
- aider/website/Gemfile +8 -0
- aider/website/_includes/blame.md +162 -0
- aider/website/_includes/get-started.md +22 -0
- aider/website/_includes/help-tip.md +5 -0
- aider/website/_includes/help.md +24 -0
- aider/website/_includes/install.md +5 -0
- aider/website/_includes/keys.md +4 -0
- aider/website/_includes/model-warnings.md +67 -0
- aider/website/_includes/multi-line.md +22 -0
- aider/website/_includes/python-m-aider.md +5 -0
- aider/website/_includes/recording.css +228 -0
- aider/website/_includes/recording.md +34 -0
- aider/website/_includes/replit-pipx.md +9 -0
- aider/website/_includes/works-best.md +1 -0
- aider/website/_sass/custom/custom.scss +103 -0
- aider/website/docs/config/adv-model-settings.md +1881 -0
- aider/website/docs/config/aider_conf.md +527 -0
- aider/website/docs/config/api-keys.md +90 -0
- aider/website/docs/config/dotenv.md +478 -0
- aider/website/docs/config/editor.md +127 -0
- aider/website/docs/config/model-aliases.md +103 -0
- aider/website/docs/config/options.md +843 -0
- aider/website/docs/config/reasoning.md +209 -0
- aider/website/docs/config.md +44 -0
- aider/website/docs/faq.md +378 -0
- aider/website/docs/git.md +76 -0
- aider/website/docs/index.md +47 -0
- aider/website/docs/install/codespaces.md +39 -0
- aider/website/docs/install/docker.md +57 -0
- aider/website/docs/install/optional.md +100 -0
- aider/website/docs/install/replit.md +8 -0
- aider/website/docs/install.md +115 -0
- aider/website/docs/languages.md +264 -0
- aider/website/docs/legal/contributor-agreement.md +111 -0
- aider/website/docs/legal/privacy.md +104 -0
- aider/website/docs/llms/anthropic.md +77 -0
- aider/website/docs/llms/azure.md +48 -0
- aider/website/docs/llms/bedrock.md +132 -0
- aider/website/docs/llms/cohere.md +34 -0
- aider/website/docs/llms/deepseek.md +32 -0
- aider/website/docs/llms/gemini.md +49 -0
- aider/website/docs/llms/github.md +105 -0
- aider/website/docs/llms/groq.md +36 -0
- aider/website/docs/llms/lm-studio.md +39 -0
- aider/website/docs/llms/ollama.md +75 -0
- aider/website/docs/llms/openai-compat.md +39 -0
- aider/website/docs/llms/openai.md +58 -0
- aider/website/docs/llms/openrouter.md +78 -0
- aider/website/docs/llms/other.md +103 -0
- aider/website/docs/llms/vertex.md +50 -0
- aider/website/docs/llms/warnings.md +10 -0
- aider/website/docs/llms/xai.md +53 -0
- aider/website/docs/llms.md +54 -0
- aider/website/docs/more/analytics.md +122 -0
- aider/website/docs/more/edit-formats.md +116 -0
- aider/website/docs/more/infinite-output.md +137 -0
- aider/website/docs/more-info.md +8 -0
- aider/website/docs/recordings/auto-accept-architect.md +31 -0
- aider/website/docs/recordings/dont-drop-original-read-files.md +35 -0
- aider/website/docs/recordings/index.md +21 -0
- aider/website/docs/recordings/model-accepts-settings.md +69 -0
- aider/website/docs/recordings/tree-sitter-language-pack.md +80 -0
- aider/website/docs/repomap.md +112 -0
- aider/website/docs/scripting.md +100 -0
- aider/website/docs/troubleshooting/aider-not-found.md +24 -0
- aider/website/docs/troubleshooting/edit-errors.md +76 -0
- aider/website/docs/troubleshooting/imports.md +62 -0
- aider/website/docs/troubleshooting/models-and-keys.md +54 -0
- aider/website/docs/troubleshooting/support.md +79 -0
- aider/website/docs/troubleshooting/token-limits.md +96 -0
- aider/website/docs/troubleshooting/warnings.md +12 -0
- aider/website/docs/troubleshooting.md +11 -0
- aider/website/docs/usage/browser.md +57 -0
- aider/website/docs/usage/caching.md +49 -0
- aider/website/docs/usage/commands.md +132 -0
- aider/website/docs/usage/conventions.md +119 -0
- aider/website/docs/usage/copypaste.md +121 -0
- aider/website/docs/usage/images-urls.md +48 -0
- aider/website/docs/usage/lint-test.md +118 -0
- aider/website/docs/usage/modes.md +211 -0
- aider/website/docs/usage/not-code.md +179 -0
- aider/website/docs/usage/notifications.md +87 -0
- aider/website/docs/usage/tips.md +79 -0
- aider/website/docs/usage/tutorials.md +30 -0
- aider/website/docs/usage/voice.md +121 -0
- aider/website/docs/usage/watch.md +294 -0
- aider/website/docs/usage.md +92 -0
- aider/website/share/index.md +101 -0
- chatmcp_cli-0.1.0.dist-info/METADATA +502 -0
- chatmcp_cli-0.1.0.dist-info/RECORD +228 -0
- chatmcp_cli-0.1.0.dist-info/WHEEL +5 -0
- chatmcp_cli-0.1.0.dist-info/entry_points.txt +3 -0
- chatmcp_cli-0.1.0.dist-info/licenses/LICENSE.txt +202 -0
- chatmcp_cli-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,132 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 560
|
4
|
+
---
|
5
|
+
|
6
|
+
# Amazon Bedrock
|
7
|
+
|
8
|
+
Aider can connect to models provided by Amazon Bedrock.
|
9
|
+
To configure Aider to use the Amazon Bedrock API, you need to set up your AWS credentials.
|
10
|
+
This can be done using the AWS CLI or by setting environment variables.
|
11
|
+
|
12
|
+
## Select a Model from Amazon Bedrock
|
13
|
+
|
14
|
+
Before you can use a model through Amazon Bedrock, you must "enable" the model under the **Model
|
15
|
+
Access** screen in the AWS Management Console.
|
16
|
+
To find the `Model ID`, open the **Model Catalog** area in the Bedrock console, select the model
|
17
|
+
you want to use, and then find the `modelId` property under the "Usage" heading.
|
18
|
+
|
19
|
+
### Bedrock Inference Profiles
|
20
|
+
|
21
|
+
Amazon Bedrock has added support for a new feature called [cross-region "inference profiles."](https://aws.amazon.com/about-aws/whats-new/2024/09/amazon-bedrock-knowledge-bases-cross-region-inference/)
|
22
|
+
Some models hosted in Bedrock _only_ support these inference profiles.
|
23
|
+
If you're using one of these models, then you will need to use the `Inference Profile ID`
|
24
|
+
instead of the `Model ID` from the **Model Catalog** screen, in the AWS Management Console.
|
25
|
+
For example, the Claude Sonnet 3.7 model, released in February 2025, exclusively supports
|
26
|
+
inference through inference profiles. To use this model, you would use the
|
27
|
+
`us.anthropic.claude-3-7-sonnet-20250219-v1:0` Inference Profile ID.
|
28
|
+
In the Amazon Bedrock console, go to Inference and Assessment ➡️ Cross-region Inference
|
29
|
+
to find the `Inference Profile ID` value.
|
30
|
+
|
31
|
+
If you attempt to use a `Model ID` for a model that exclusively supports the Inference Profile
|
32
|
+
feature, you will receive an error message like the following:
|
33
|
+
|
34
|
+
> litellm.BadRequestError: BedrockException - b'{"message":"Invocation of model ID
|
35
|
+
anthropic.claude-3-7-sonnet-20250219-v1:0 with on-demand throughput isn\xe2\x80\x99t supported. Retry your
|
36
|
+
request with the ID or ARN of an inference profile that contains this model."}'
|
37
|
+
|
38
|
+
## Installation and Configuration
|
39
|
+
|
40
|
+
First, install aider:
|
41
|
+
|
42
|
+
{% include install.md %}
|
43
|
+
|
44
|
+
Next, configure your AWS credentials. This can be done using the AWS CLI or by setting environment variables.
|
45
|
+
|
46
|
+
## AWS CLI Configuration
|
47
|
+
|
48
|
+
If you haven't already, install the [AWS CLI](https://aws.amazon.com/cli/) and configure it with your credentials:
|
49
|
+
|
50
|
+
```bash
|
51
|
+
aws configure
|
52
|
+
```
|
53
|
+
|
54
|
+
This will prompt you to enter your AWS Access Key ID, Secret Access Key, and default region.
|
55
|
+
|
56
|
+
## Environment Variables
|
57
|
+
|
58
|
+
You can set the following environment variables:
|
59
|
+
|
60
|
+
```bash
|
61
|
+
export AWS_REGION=your_preferred_region
|
62
|
+
|
63
|
+
# For user authentication
|
64
|
+
export AWS_ACCESS_KEY_ID=your_access_key
|
65
|
+
export AWS_SECRET_ACCESS_KEY=your_secret_key
|
66
|
+
|
67
|
+
# For profile authentication
|
68
|
+
export AWS_PROFILE=your-profile
|
69
|
+
```
|
70
|
+
|
71
|
+
You can add these to your
|
72
|
+
[.env file](/docs/config/dotenv.html).
|
73
|
+
|
74
|
+
### Set Environment Variables with PowerShell
|
75
|
+
|
76
|
+
If you're using PowerShell on MacOS, Linux, or Windows, you can set the same AWS configuration environment variables with these commands.
|
77
|
+
|
78
|
+
```pwsh
|
79
|
+
$env:AWS_ACCESS_KEY_ID = 'your_access_key'
|
80
|
+
$env:AWS_SECRET_ACCESS_KEY = 'your_secret_key'
|
81
|
+
$env:AWS_REGION = 'us-west-2' # Put whichever AWS region that you'd like, that the Bedrock service supports.
|
82
|
+
```
|
83
|
+
|
84
|
+
|
85
|
+
## Get Started
|
86
|
+
|
87
|
+
Once your AWS credentials are set up, you can run Aider with the `--model` command line switch, specifying the Bedrock model you want to use:
|
88
|
+
|
89
|
+
```bash
|
90
|
+
# Change directory into your codebase
|
91
|
+
cd /to/your/project
|
92
|
+
|
93
|
+
aider --model bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0
|
94
|
+
```
|
95
|
+
|
96
|
+
Sometimes it seems to help if you prefix the model name with "us.":
|
97
|
+
|
98
|
+
```bash
|
99
|
+
aider --model bedrock/us.anthropic.claude-3-5-sonnet-20240620-v1:0
|
100
|
+
```
|
101
|
+
|
102
|
+
|
103
|
+
## Available Models
|
104
|
+
|
105
|
+
To see some models available via Bedrock, run:
|
106
|
+
|
107
|
+
```bash
|
108
|
+
aider --list-models bedrock/
|
109
|
+
```
|
110
|
+
|
111
|
+
Make sure you have access to these models in your AWS account before attempting to use them with Aider.
|
112
|
+
|
113
|
+
## Install boto3
|
114
|
+
You may need to install the `boto3` package.
|
115
|
+
|
116
|
+
```bash
|
117
|
+
# If you installed with aider-install or `uv tool`
|
118
|
+
uv tool run --from aider-chat pip install boto3
|
119
|
+
|
120
|
+
# Or with pipx...
|
121
|
+
pipx inject aider-chat boto3
|
122
|
+
|
123
|
+
# Or with pip
|
124
|
+
pip install -U boto3
|
125
|
+
```
|
126
|
+
|
127
|
+
# More info
|
128
|
+
|
129
|
+
For more information on Amazon Bedrock and its models, refer to the [official AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html).
|
130
|
+
|
131
|
+
Also, see the
|
132
|
+
[litellm docs on Bedrock](https://litellm.vercel.app/docs/providers/bedrock).
|
@@ -0,0 +1,34 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 500
|
4
|
+
---
|
5
|
+
|
6
|
+
# Cohere
|
7
|
+
|
8
|
+
Cohere offers *free* API access to their models.
|
9
|
+
Their Command-R+ model works well with aider
|
10
|
+
as a *very basic* coding assistant.
|
11
|
+
You'll need a [Cohere API key](https://dashboard.cohere.com/welcome/login).
|
12
|
+
|
13
|
+
First, install aider:
|
14
|
+
|
15
|
+
{% include install.md %}
|
16
|
+
|
17
|
+
Then configure your API keys:
|
18
|
+
|
19
|
+
```
|
20
|
+
export COHERE_API_KEY=<key> # Mac/Linux
|
21
|
+
setx COHERE_API_KEY <key> # Windows, restart shell after setx
|
22
|
+
```
|
23
|
+
|
24
|
+
Start working with aider and Cohere on your codebase:
|
25
|
+
|
26
|
+
```bash
|
27
|
+
# Change directory into your codebase
|
28
|
+
cd /to/your/project
|
29
|
+
|
30
|
+
aider --model command-r-plus-08-2024
|
31
|
+
|
32
|
+
# List models available from Cohere
|
33
|
+
aider --list-models cohere_chat/
|
34
|
+
```
|
@@ -0,0 +1,32 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 500
|
4
|
+
---
|
5
|
+
|
6
|
+
# DeepSeek
|
7
|
+
|
8
|
+
Aider can connect to the DeepSeek.com API.
|
9
|
+
To work with DeepSeek's models, you need to set the `DEEPSEEK_API_KEY` environment variable with your [DeepSeek API key](https://platform.deepseek.com/api_keys).
|
10
|
+
The DeepSeek Chat V3 model has a top score on aider's code editing benchmark.
|
11
|
+
|
12
|
+
First, install aider:
|
13
|
+
|
14
|
+
{% include install.md %}
|
15
|
+
|
16
|
+
Then configure your API keys:
|
17
|
+
|
18
|
+
```
|
19
|
+
export DEEPSEEK_API_KEY=<key> # Mac/Linux
|
20
|
+
setx DEEPSEEK_API_KEY <key> # Windows, restart shell after setx
|
21
|
+
```
|
22
|
+
|
23
|
+
Start working with aider and DeepSeek on your codebase:
|
24
|
+
|
25
|
+
```bash
|
26
|
+
# Change directory into your codebase
|
27
|
+
cd /to/your/project
|
28
|
+
|
29
|
+
# Use DeepSeek Chat v3
|
30
|
+
aider --model deepseek/deepseek-chat
|
31
|
+
```
|
32
|
+
|
@@ -0,0 +1,49 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 300
|
4
|
+
---
|
5
|
+
|
6
|
+
# Gemini
|
7
|
+
|
8
|
+
You'll need a [Gemini API key](https://aistudio.google.com/app/u/2/apikey).
|
9
|
+
|
10
|
+
First, install aider:
|
11
|
+
|
12
|
+
{% include install.md %}
|
13
|
+
|
14
|
+
Then configure your API keys:
|
15
|
+
|
16
|
+
```bash
|
17
|
+
export GEMINI_API_KEY=<key> # Mac/Linux
|
18
|
+
setx GEMINI_API_KEY <key> # Windows, restart shell after setx
|
19
|
+
```
|
20
|
+
|
21
|
+
Start working with aider and Gemini on your codebase:
|
22
|
+
|
23
|
+
|
24
|
+
```bash
|
25
|
+
# Change directory into your codebase
|
26
|
+
cd /to/your/project
|
27
|
+
|
28
|
+
# You can run the Gemini 2.5 Pro model with this shortcut:
|
29
|
+
aider --model gemini
|
30
|
+
|
31
|
+
# You can run the Gemini 2.5 Pro Exp for free, with usage limits:
|
32
|
+
aider --model gemini-exp
|
33
|
+
|
34
|
+
# List models available from Gemini
|
35
|
+
aider --list-models gemini/
|
36
|
+
```
|
37
|
+
|
38
|
+
You may need to install the `google-generativeai` package.
|
39
|
+
|
40
|
+
```bash
|
41
|
+
# If you installed with aider-install or `uv tool`
|
42
|
+
uv tool run --from aider-chat pip install google-generativeai
|
43
|
+
|
44
|
+
# Or with pipx...
|
45
|
+
pipx inject aider-chat google-generativeai
|
46
|
+
|
47
|
+
# Or with pip
|
48
|
+
pip install -U google-generativeai
|
49
|
+
```
|
@@ -0,0 +1,105 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 510
|
4
|
+
---
|
5
|
+
|
6
|
+
# GitHub Copilot
|
7
|
+
|
8
|
+
Aider can connect to GitHub Copilot’s LLMs because Copilot exposes a standard **OpenAI-style**
|
9
|
+
endpoint at:
|
10
|
+
|
11
|
+
```
|
12
|
+
https://api.githubcopilot.com
|
13
|
+
```
|
14
|
+
|
15
|
+
First, install aider:
|
16
|
+
|
17
|
+
{% include install.md %}
|
18
|
+
|
19
|
+
---
|
20
|
+
|
21
|
+
## Configure your environment
|
22
|
+
|
23
|
+
```bash
|
24
|
+
# macOS/Linux
|
25
|
+
export OPENAI_API_BASE=https://api.githubcopilot.com
|
26
|
+
export OPENAI_API_KEY=<oauth_token>
|
27
|
+
|
28
|
+
# Windows (PowerShell)
|
29
|
+
setx OPENAI_API_BASE https://api.githubcopilot.com
|
30
|
+
setx OPENAI_API_KEY <oauth_token>
|
31
|
+
# …restart the shell after setx commands
|
32
|
+
```
|
33
|
+
|
34
|
+
---
|
35
|
+
|
36
|
+
### Where do I get the token?
|
37
|
+
The easiest path is to sign in to Copilot from any JetBrains IDE (PyCharm, GoLand, etc).
|
38
|
+
After you authenticate a file appears:
|
39
|
+
|
40
|
+
```
|
41
|
+
~/.config/github-copilot/apps.json
|
42
|
+
```
|
43
|
+
|
44
|
+
Copy the `oauth_token` value – that string is your `OPENAI_API_KEY`.
|
45
|
+
|
46
|
+
*Note:* tokens created by the Neovim **copilot.lua** plugin (old `hosts.json`) sometimes lack the
|
47
|
+
needed scopes. If you see “access to this endpoint is forbidden”, regenerate the token with a
|
48
|
+
JetBrains IDE or the VS Code Copilot extension.
|
49
|
+
|
50
|
+
---
|
51
|
+
|
52
|
+
## Discover available models
|
53
|
+
|
54
|
+
Copilot hosts many models (OpenAI, Anthropic, Google, etc).
|
55
|
+
List the models your subscription allows with:
|
56
|
+
|
57
|
+
```bash
|
58
|
+
curl -s https://api.githubcopilot.com/models \
|
59
|
+
-H "Authorization: Bearer $OPENAI_API_KEY" \
|
60
|
+
-H "Content-Type: application/json" \
|
61
|
+
-H "Copilot-Integration-Id: vscode-chat" | jq -r '.data[].id'
|
62
|
+
```
|
63
|
+
|
64
|
+
Each returned ID can be used with aider by **prefixing it with `openai/`**:
|
65
|
+
|
66
|
+
```bash
|
67
|
+
aider --model openai/gpt-4o
|
68
|
+
# or
|
69
|
+
aider --model openai/claude-3.7-sonnet-thought
|
70
|
+
```
|
71
|
+
|
72
|
+
---
|
73
|
+
|
74
|
+
## Quick start
|
75
|
+
|
76
|
+
```bash
|
77
|
+
# change into your project
|
78
|
+
cd /to/your/project
|
79
|
+
|
80
|
+
# talk to Copilot
|
81
|
+
aider --model openai/gpt-4o
|
82
|
+
```
|
83
|
+
|
84
|
+
---
|
85
|
+
|
86
|
+
## Optional config file (`~/.aider.conf.yml`)
|
87
|
+
|
88
|
+
```yaml
|
89
|
+
openai-api-base: https://api.githubcopilot.com
|
90
|
+
openai-api-key: "<oauth_token>"
|
91
|
+
model: openai/gpt-4o
|
92
|
+
weak-model: openai/gpt-4o-mini
|
93
|
+
show-model-warnings: false
|
94
|
+
```
|
95
|
+
|
96
|
+
---
|
97
|
+
|
98
|
+
## FAQ
|
99
|
+
|
100
|
+
* Calls made through aider are billed through your Copilot subscription
|
101
|
+
(aider will still print *estimated* costs).
|
102
|
+
* The Copilot docs explicitly allow third-party “agents” that hit this API – aider is playing by
|
103
|
+
the rules.
|
104
|
+
* Aider talks directly to the REST endpoint—no web-UI scraping or browser automation.
|
105
|
+
|
@@ -0,0 +1,36 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 400
|
4
|
+
---
|
5
|
+
|
6
|
+
# GROQ
|
7
|
+
|
8
|
+
Groq currently offers *free* API access to the models they host.
|
9
|
+
The Llama 3 70B model works
|
10
|
+
well with aider and is comparable to GPT-3.5 in code editing performance.
|
11
|
+
You'll need a [Groq API key](https://console.groq.com/keys).
|
12
|
+
|
13
|
+
First, install aider:
|
14
|
+
|
15
|
+
{% include install.md %}
|
16
|
+
|
17
|
+
Then configure your API keys:
|
18
|
+
|
19
|
+
```
|
20
|
+
export GROQ_API_KEY=<key> # Mac/Linux
|
21
|
+
setx GROQ_API_KEY <key> # Windows, restart shell after setx
|
22
|
+
```
|
23
|
+
|
24
|
+
Start working with aider and Groq on your codebase:
|
25
|
+
|
26
|
+
```bash
|
27
|
+
# Change directory into your codebase
|
28
|
+
cd /to/your/project
|
29
|
+
|
30
|
+
aider --model groq/llama3-70b-8192
|
31
|
+
|
32
|
+
# List models available from Groq
|
33
|
+
aider --list-models groq/
|
34
|
+
```
|
35
|
+
|
36
|
+
|
@@ -0,0 +1,39 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 400
|
4
|
+
---
|
5
|
+
|
6
|
+
# LM Studio
|
7
|
+
|
8
|
+
Aider can connect to models served by LM Studio.
|
9
|
+
|
10
|
+
First, install aider:
|
11
|
+
|
12
|
+
{% include install.md %}
|
13
|
+
|
14
|
+
Then configure your API key and endpoint:
|
15
|
+
|
16
|
+
```
|
17
|
+
# Must set a value here even if it's a dummy value
|
18
|
+
export LM_STUDIO_API_KEY=dummy-api-key # Mac/Linux
|
19
|
+
setx LM_STUDIO_API_KEY dummy-api-key # Windows, restart shell after setx
|
20
|
+
|
21
|
+
# LM Studio default server URL is http://localhost:1234/v1
|
22
|
+
export LM_STUDIO_API_BASE=http://localhost:1234/v1 # Mac/Linux
|
23
|
+
setx LM_STUDIO_API_BASE http://localhost:1234/v1 # Windows, restart shell after setx
|
24
|
+
```
|
25
|
+
|
26
|
+
**Note:** Even though LM Studio doesn't require an API Key out of the box, the `LM_STUDIO_API_KEY` must have a dummy value like `dummy-api-key` set or the client request will fail trying to send an empty `Bearer` token.
|
27
|
+
|
28
|
+
Start working with aider and LM Studio on your codebase:
|
29
|
+
|
30
|
+
```bash
|
31
|
+
# Change directory into your codebase
|
32
|
+
cd /to/your/project
|
33
|
+
|
34
|
+
aider --model lm_studio/<your-model-name>
|
35
|
+
```
|
36
|
+
|
37
|
+
See the [model warnings](warnings.html)
|
38
|
+
section for information on warnings which will occur
|
39
|
+
when working with models that aider is not familiar with.
|
@@ -0,0 +1,75 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 500
|
4
|
+
---
|
5
|
+
|
6
|
+
# Ollama
|
7
|
+
|
8
|
+
Aider can connect to local Ollama models.
|
9
|
+
|
10
|
+
First, install aider:
|
11
|
+
|
12
|
+
{% include install.md %}
|
13
|
+
|
14
|
+
Then configure your Ollama API endpoint (usually the default):
|
15
|
+
|
16
|
+
```bash
|
17
|
+
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
|
18
|
+
setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows, restart shell after setx
|
19
|
+
```
|
20
|
+
|
21
|
+
Start working with aider and Ollama on your codebase:
|
22
|
+
|
23
|
+
```
|
24
|
+
# Pull the model
|
25
|
+
ollama pull <model>
|
26
|
+
|
27
|
+
# Start your ollama server, increasing the context window to 8k tokens
|
28
|
+
OLLAMA_CONTEXT_LENGTH=8192 ollama serve
|
29
|
+
|
30
|
+
# In another terminal window, change directory into your codebase
|
31
|
+
cd /to/your/project
|
32
|
+
|
33
|
+
aider --model ollama_chat/<model>
|
34
|
+
```
|
35
|
+
|
36
|
+
{: .note }
|
37
|
+
Using `ollama_chat/` is recommended over `ollama/`.
|
38
|
+
|
39
|
+
|
40
|
+
See the [model warnings](warnings.html)
|
41
|
+
section for information on warnings which will occur
|
42
|
+
when working with models that aider is not familiar with.
|
43
|
+
|
44
|
+
## API Key
|
45
|
+
|
46
|
+
If you are using an ollama that requires an API key you can set `OLLAMA_API_KEY`:
|
47
|
+
|
48
|
+
```
|
49
|
+
export OLLAMA_API_KEY=<api-key> # Mac/Linux
|
50
|
+
setx OLLAMA_API_KEY <api-key> # Windows, restart shell after setx
|
51
|
+
```
|
52
|
+
|
53
|
+
## Setting the context window size
|
54
|
+
|
55
|
+
[Ollama uses a 2k context window by default](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-can-i-specify-the-context-window-size),
|
56
|
+
which is very small for working with aider.
|
57
|
+
It also **silently** discards context that exceeds the window.
|
58
|
+
This is especially dangerous because many users don't even realize that most of their data
|
59
|
+
is being discarded by Ollama.
|
60
|
+
|
61
|
+
By default, aider sets Ollama's context window
|
62
|
+
to be large enough for each request you send plus 8k tokens for the reply.
|
63
|
+
This ensures data isn't silently discarded by Ollama.
|
64
|
+
|
65
|
+
If you'd like you can configure a fixed sized context window instead
|
66
|
+
with an
|
67
|
+
[`.aider.model.settings.yml` file](https://aider.chat/docs/config/adv-model-settings.html#model-settings)
|
68
|
+
like this:
|
69
|
+
|
70
|
+
```
|
71
|
+
- name: ollama/qwen2.5-coder:32b-instruct-fp16
|
72
|
+
extra_params:
|
73
|
+
num_ctx: 65536
|
74
|
+
```
|
75
|
+
|
@@ -0,0 +1,39 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 500
|
4
|
+
---
|
5
|
+
|
6
|
+
# OpenAI compatible APIs
|
7
|
+
|
8
|
+
Aider can connect to any LLM which is accessible via an OpenAI compatible API endpoint.
|
9
|
+
|
10
|
+
First, install aider:
|
11
|
+
|
12
|
+
{% include install.md %}
|
13
|
+
|
14
|
+
Then configure your API key and endpoint:
|
15
|
+
|
16
|
+
```
|
17
|
+
# Mac/Linux:
|
18
|
+
export OPENAI_API_BASE=<endpoint>
|
19
|
+
export OPENAI_API_KEY=<key>
|
20
|
+
|
21
|
+
# Windows:
|
22
|
+
setx OPENAI_API_BASE <endpoint>
|
23
|
+
setx OPENAI_API_KEY <key>
|
24
|
+
# ... restart shell after setx commands
|
25
|
+
```
|
26
|
+
|
27
|
+
Start working with aider and your OpenAI compatible API on your codebase:
|
28
|
+
|
29
|
+
```bash
|
30
|
+
# Change directory into your codebase
|
31
|
+
cd /to/your/project
|
32
|
+
|
33
|
+
# Prefix the model name with openai/
|
34
|
+
aider --model openai/<model-name>
|
35
|
+
```
|
36
|
+
|
37
|
+
See the [model warnings](warnings.html)
|
38
|
+
section for information on warnings which will occur
|
39
|
+
when working with models that aider is not familiar with.
|
@@ -0,0 +1,58 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 100
|
4
|
+
---
|
5
|
+
|
6
|
+
# OpenAI
|
7
|
+
|
8
|
+
To work with OpenAI's models, you need to provide your
|
9
|
+
[OpenAI API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key)
|
10
|
+
either in the `OPENAI_API_KEY` environment variable or
|
11
|
+
via the `--api-key openai=<key>` command line switch.
|
12
|
+
|
13
|
+
First, install aider:
|
14
|
+
|
15
|
+
{% include install.md %}
|
16
|
+
|
17
|
+
Then configure your API keys:
|
18
|
+
|
19
|
+
```
|
20
|
+
export OPENAI_API_KEY=<key> # Mac/Linux
|
21
|
+
setx OPENAI_API_KEY <key> # Windows, restart shell after setx
|
22
|
+
```
|
23
|
+
|
24
|
+
Start working with aider and OpenAI on your codebase:
|
25
|
+
|
26
|
+
```bash
|
27
|
+
# Change directory into your codebase
|
28
|
+
cd /to/your/project
|
29
|
+
|
30
|
+
# o3-mini
|
31
|
+
aider --model o3-mini
|
32
|
+
|
33
|
+
# o1-mini
|
34
|
+
aider --model o1-mini
|
35
|
+
|
36
|
+
# GPT-4o
|
37
|
+
aider --model gpt-4o
|
38
|
+
|
39
|
+
# List models available from OpenAI
|
40
|
+
aider --list-models openai/
|
41
|
+
```
|
42
|
+
|
43
|
+
You can use `aider --model <model-name>` to use any other OpenAI model.
|
44
|
+
For example, if you want to use a specific version of GPT-4 Turbo
|
45
|
+
you could do `aider --model gpt-4-0125-preview`.
|
46
|
+
|
47
|
+
## Reasoning models from other providers
|
48
|
+
|
49
|
+
Many of OpenAI's
|
50
|
+
"reasoning" models have restrictions on streaming and setting the temperature parameter.
|
51
|
+
Some also support different levels of "reasoning effort".
|
52
|
+
Aider is configured to work properly with these models
|
53
|
+
when served through major provider APIs and
|
54
|
+
has a `--reasoning-effort` setting.
|
55
|
+
|
56
|
+
You may need to [configure reasoning model settings](/docs/config/reasoning.html)
|
57
|
+
if you are using them through another provider
|
58
|
+
and see errors related to temperature or system prompt.
|
@@ -0,0 +1,78 @@
|
|
1
|
+
---
|
2
|
+
parent: Connecting to LLMs
|
3
|
+
nav_order: 500
|
4
|
+
---
|
5
|
+
|
6
|
+
# OpenRouter
|
7
|
+
|
8
|
+
Aider can connect to [models provided by OpenRouter](https://openrouter.ai/models?o=top-weekly):
|
9
|
+
You'll need an [OpenRouter API key](https://openrouter.ai/keys).
|
10
|
+
|
11
|
+
First, install aider:
|
12
|
+
|
13
|
+
{% include install.md %}
|
14
|
+
|
15
|
+
Then configure your API keys:
|
16
|
+
|
17
|
+
```
|
18
|
+
export OPENROUTER_API_KEY=<key> # Mac/Linux
|
19
|
+
setx OPENROUTER_API_KEY <key> # Windows, restart shell after setx
|
20
|
+
```
|
21
|
+
|
22
|
+
Start working with aider and OpenRouter on your codebase:
|
23
|
+
|
24
|
+
```bash
|
25
|
+
# Change directory into your codebase
|
26
|
+
cd /to/your/project
|
27
|
+
|
28
|
+
# Or any other OpenRouter model
|
29
|
+
aider --model openrouter/<provider>/<model>
|
30
|
+
|
31
|
+
# List models available from OpenRouter
|
32
|
+
aider --list-models openrouter/
|
33
|
+
```
|
34
|
+
|
35
|
+
In particular, many aider users access Sonnet via OpenRouter:
|
36
|
+
|
37
|
+
{: .tip }
|
38
|
+
If you get errors, check your
|
39
|
+
[OpenRouter privacy settings](https://openrouter.ai/settings/privacy).
|
40
|
+
Be sure to "enable providers that may train on inputs"
|
41
|
+
to allow use of all models.
|
42
|
+
|
43
|
+
## Controlling provider selection
|
44
|
+
|
45
|
+
OpenRouter often has multiple providers serving each model.
|
46
|
+
You can control which OpenRouter providers are used for your requests in two ways:
|
47
|
+
|
48
|
+
1. By "ignoring" certain providers in your
|
49
|
+
[OpenRouter account settings](https://openrouter.ai/settings/preferences).
|
50
|
+
This disables those named providers across all the models that you access via OpenRouter.
|
51
|
+
|
52
|
+
2. By configuring "provider routing" in a `.aider.model.settings.yml` file.
|
53
|
+
|
54
|
+
Place that file in your home directory or the root of your git project, with
|
55
|
+
entries like this:
|
56
|
+
|
57
|
+
```yaml
|
58
|
+
- name: openrouter/anthropic/claude-3.7-sonnet
|
59
|
+
extra_params:
|
60
|
+
extra_body:
|
61
|
+
provider:
|
62
|
+
# Only use these providers, in this order
|
63
|
+
order: ["Anthropic", "Together"]
|
64
|
+
# Don't fall back to other providers
|
65
|
+
allow_fallbacks: false
|
66
|
+
# Skip providers that may train on inputs
|
67
|
+
data_collection: "deny"
|
68
|
+
# Only use providers supporting all parameters
|
69
|
+
require_parameters: true
|
70
|
+
```
|
71
|
+
|
72
|
+
See [OpenRouter's provider routing docs](https://openrouter.ai/docs/provider-routing) for full details on these settings.
|
73
|
+
|
74
|
+
See [Advanced model settings](https://aider.chat/docs/config/adv-model-settings.html#model-settings)
|
75
|
+
for more details about model settings files.
|
76
|
+
|
77
|
+
|
78
|
+
|