cecli-dev 0.93.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (366) hide show
  1. cecli/__init__.py +20 -0
  2. cecli/__main__.py +4 -0
  3. cecli/_version.py +34 -0
  4. cecli/args.py +1092 -0
  5. cecli/args_formatter.py +228 -0
  6. cecli/change_tracker.py +133 -0
  7. cecli/coders/__init__.py +38 -0
  8. cecli/coders/agent_coder.py +1872 -0
  9. cecli/coders/architect_coder.py +63 -0
  10. cecli/coders/ask_coder.py +8 -0
  11. cecli/coders/base_coder.py +3993 -0
  12. cecli/coders/chat_chunks.py +116 -0
  13. cecli/coders/context_coder.py +52 -0
  14. cecli/coders/copypaste_coder.py +269 -0
  15. cecli/coders/editblock_coder.py +656 -0
  16. cecli/coders/editblock_fenced_coder.py +9 -0
  17. cecli/coders/editblock_func_coder.py +140 -0
  18. cecli/coders/editor_diff_fenced_coder.py +8 -0
  19. cecli/coders/editor_editblock_coder.py +8 -0
  20. cecli/coders/editor_whole_coder.py +8 -0
  21. cecli/coders/help_coder.py +15 -0
  22. cecli/coders/patch_coder.py +705 -0
  23. cecli/coders/search_replace.py +757 -0
  24. cecli/coders/shell.py +37 -0
  25. cecli/coders/single_wholefile_func_coder.py +101 -0
  26. cecli/coders/udiff_coder.py +428 -0
  27. cecli/coders/udiff_simple.py +12 -0
  28. cecli/coders/wholefile_coder.py +143 -0
  29. cecli/coders/wholefile_func_coder.py +133 -0
  30. cecli/commands/__init__.py +192 -0
  31. cecli/commands/add.py +226 -0
  32. cecli/commands/agent.py +51 -0
  33. cecli/commands/architect.py +46 -0
  34. cecli/commands/ask.py +44 -0
  35. cecli/commands/chat_mode.py +0 -0
  36. cecli/commands/clear.py +37 -0
  37. cecli/commands/code.py +46 -0
  38. cecli/commands/command_prefix.py +44 -0
  39. cecli/commands/commit.py +52 -0
  40. cecli/commands/context.py +47 -0
  41. cecli/commands/context_blocks.py +124 -0
  42. cecli/commands/context_management.py +51 -0
  43. cecli/commands/copy.py +62 -0
  44. cecli/commands/copy_context.py +81 -0
  45. cecli/commands/core.py +287 -0
  46. cecli/commands/diff.py +68 -0
  47. cecli/commands/drop.py +217 -0
  48. cecli/commands/editor.py +78 -0
  49. cecli/commands/exit.py +55 -0
  50. cecli/commands/git.py +57 -0
  51. cecli/commands/help.py +140 -0
  52. cecli/commands/history_search.py +40 -0
  53. cecli/commands/lint.py +109 -0
  54. cecli/commands/list_sessions.py +56 -0
  55. cecli/commands/load.py +85 -0
  56. cecli/commands/load_session.py +48 -0
  57. cecli/commands/load_skill.py +68 -0
  58. cecli/commands/ls.py +75 -0
  59. cecli/commands/map.py +37 -0
  60. cecli/commands/map_refresh.py +35 -0
  61. cecli/commands/model.py +118 -0
  62. cecli/commands/models.py +41 -0
  63. cecli/commands/multiline_mode.py +38 -0
  64. cecli/commands/paste.py +91 -0
  65. cecli/commands/quit.py +32 -0
  66. cecli/commands/read_only.py +267 -0
  67. cecli/commands/read_only_stub.py +270 -0
  68. cecli/commands/reasoning_effort.py +70 -0
  69. cecli/commands/remove_skill.py +68 -0
  70. cecli/commands/report.py +40 -0
  71. cecli/commands/reset.py +88 -0
  72. cecli/commands/run.py +99 -0
  73. cecli/commands/save.py +49 -0
  74. cecli/commands/save_session.py +43 -0
  75. cecli/commands/settings.py +69 -0
  76. cecli/commands/test.py +58 -0
  77. cecli/commands/think_tokens.py +74 -0
  78. cecli/commands/tokens.py +207 -0
  79. cecli/commands/undo.py +145 -0
  80. cecli/commands/utils/__init__.py +0 -0
  81. cecli/commands/utils/base_command.py +131 -0
  82. cecli/commands/utils/helpers.py +142 -0
  83. cecli/commands/utils/registry.py +53 -0
  84. cecli/commands/utils/save_load_manager.py +98 -0
  85. cecli/commands/voice.py +78 -0
  86. cecli/commands/weak_model.py +123 -0
  87. cecli/commands/web.py +87 -0
  88. cecli/deprecated_args.py +185 -0
  89. cecli/diffs.py +129 -0
  90. cecli/dump.py +29 -0
  91. cecli/editor.py +147 -0
  92. cecli/exceptions.py +115 -0
  93. cecli/format_settings.py +26 -0
  94. cecli/help.py +119 -0
  95. cecli/help_pats.py +19 -0
  96. cecli/helpers/__init__.py +9 -0
  97. cecli/helpers/copypaste.py +123 -0
  98. cecli/helpers/coroutines.py +8 -0
  99. cecli/helpers/file_searcher.py +142 -0
  100. cecli/helpers/model_providers.py +552 -0
  101. cecli/helpers/plugin_manager.py +81 -0
  102. cecli/helpers/profiler.py +162 -0
  103. cecli/helpers/requests.py +77 -0
  104. cecli/helpers/similarity.py +98 -0
  105. cecli/helpers/skills.py +577 -0
  106. cecli/history.py +186 -0
  107. cecli/io.py +1782 -0
  108. cecli/linter.py +304 -0
  109. cecli/llm.py +101 -0
  110. cecli/main.py +1280 -0
  111. cecli/mcp/__init__.py +154 -0
  112. cecli/mcp/oauth.py +250 -0
  113. cecli/mcp/server.py +278 -0
  114. cecli/mdstream.py +243 -0
  115. cecli/models.py +1255 -0
  116. cecli/onboarding.py +301 -0
  117. cecli/prompts/__init__.py +0 -0
  118. cecli/prompts/agent.yml +71 -0
  119. cecli/prompts/architect.yml +35 -0
  120. cecli/prompts/ask.yml +31 -0
  121. cecli/prompts/base.yml +99 -0
  122. cecli/prompts/context.yml +60 -0
  123. cecli/prompts/copypaste.yml +5 -0
  124. cecli/prompts/editblock.yml +143 -0
  125. cecli/prompts/editblock_fenced.yml +106 -0
  126. cecli/prompts/editblock_func.yml +25 -0
  127. cecli/prompts/editor_diff_fenced.yml +115 -0
  128. cecli/prompts/editor_editblock.yml +121 -0
  129. cecli/prompts/editor_whole.yml +46 -0
  130. cecli/prompts/help.yml +37 -0
  131. cecli/prompts/patch.yml +110 -0
  132. cecli/prompts/single_wholefile_func.yml +24 -0
  133. cecli/prompts/udiff.yml +106 -0
  134. cecli/prompts/udiff_simple.yml +13 -0
  135. cecli/prompts/utils/__init__.py +0 -0
  136. cecli/prompts/utils/prompt_registry.py +167 -0
  137. cecli/prompts/utils/system.py +56 -0
  138. cecli/prompts/wholefile.yml +50 -0
  139. cecli/prompts/wholefile_func.yml +24 -0
  140. cecli/queries/tree-sitter-language-pack/README.md +7 -0
  141. cecli/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
  142. cecli/queries/tree-sitter-language-pack/c-tags.scm +12 -0
  143. cecli/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
  144. cecli/queries/tree-sitter-language-pack/clojure-tags.scm +12 -0
  145. cecli/queries/tree-sitter-language-pack/commonlisp-tags.scm +127 -0
  146. cecli/queries/tree-sitter-language-pack/cpp-tags.scm +18 -0
  147. cecli/queries/tree-sitter-language-pack/csharp-tags.scm +32 -0
  148. cecli/queries/tree-sitter-language-pack/d-tags.scm +26 -0
  149. cecli/queries/tree-sitter-language-pack/dart-tags.scm +97 -0
  150. cecli/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
  151. cecli/queries/tree-sitter-language-pack/elixir-tags.scm +59 -0
  152. cecli/queries/tree-sitter-language-pack/elm-tags.scm +22 -0
  153. cecli/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
  154. cecli/queries/tree-sitter-language-pack/go-tags.scm +49 -0
  155. cecli/queries/tree-sitter-language-pack/java-tags.scm +26 -0
  156. cecli/queries/tree-sitter-language-pack/javascript-tags.scm +96 -0
  157. cecli/queries/tree-sitter-language-pack/lua-tags.scm +39 -0
  158. cecli/queries/tree-sitter-language-pack/matlab-tags.scm +10 -0
  159. cecli/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
  160. cecli/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +101 -0
  161. cecli/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
  162. cecli/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
  163. cecli/queries/tree-sitter-language-pack/python-tags.scm +24 -0
  164. cecli/queries/tree-sitter-language-pack/r-tags.scm +27 -0
  165. cecli/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
  166. cecli/queries/tree-sitter-language-pack/ruby-tags.scm +69 -0
  167. cecli/queries/tree-sitter-language-pack/rust-tags.scm +63 -0
  168. cecli/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
  169. cecli/queries/tree-sitter-language-pack/swift-tags.scm +54 -0
  170. cecli/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
  171. cecli/queries/tree-sitter-languages/README.md +24 -0
  172. cecli/queries/tree-sitter-languages/c-tags.scm +12 -0
  173. cecli/queries/tree-sitter-languages/c_sharp-tags.scm +52 -0
  174. cecli/queries/tree-sitter-languages/cpp-tags.scm +18 -0
  175. cecli/queries/tree-sitter-languages/dart-tags.scm +92 -0
  176. cecli/queries/tree-sitter-languages/elisp-tags.scm +8 -0
  177. cecli/queries/tree-sitter-languages/elixir-tags.scm +59 -0
  178. cecli/queries/tree-sitter-languages/elm-tags.scm +22 -0
  179. cecli/queries/tree-sitter-languages/fortran-tags.scm +18 -0
  180. cecli/queries/tree-sitter-languages/go-tags.scm +36 -0
  181. cecli/queries/tree-sitter-languages/haskell-tags.scm +5 -0
  182. cecli/queries/tree-sitter-languages/hcl-tags.scm +77 -0
  183. cecli/queries/tree-sitter-languages/java-tags.scm +26 -0
  184. cecli/queries/tree-sitter-languages/javascript-tags.scm +96 -0
  185. cecli/queries/tree-sitter-languages/julia-tags.scm +60 -0
  186. cecli/queries/tree-sitter-languages/kotlin-tags.scm +30 -0
  187. cecli/queries/tree-sitter-languages/matlab-tags.scm +10 -0
  188. cecli/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
  189. cecli/queries/tree-sitter-languages/ocaml_interface-tags.scm +104 -0
  190. cecli/queries/tree-sitter-languages/php-tags.scm +32 -0
  191. cecli/queries/tree-sitter-languages/python-tags.scm +22 -0
  192. cecli/queries/tree-sitter-languages/ql-tags.scm +26 -0
  193. cecli/queries/tree-sitter-languages/ruby-tags.scm +69 -0
  194. cecli/queries/tree-sitter-languages/rust-tags.scm +63 -0
  195. cecli/queries/tree-sitter-languages/scala-tags.scm +64 -0
  196. cecli/queries/tree-sitter-languages/typescript-tags.scm +44 -0
  197. cecli/queries/tree-sitter-languages/zig-tags.scm +20 -0
  198. cecli/reasoning_tags.py +82 -0
  199. cecli/repo.py +626 -0
  200. cecli/repomap.py +1368 -0
  201. cecli/report.py +260 -0
  202. cecli/resources/__init__.py +3 -0
  203. cecli/resources/model-metadata.json +25751 -0
  204. cecli/resources/model-settings.yml +2394 -0
  205. cecli/resources/providers.json +67 -0
  206. cecli/run_cmd.py +143 -0
  207. cecli/scrape.py +295 -0
  208. cecli/sendchat.py +250 -0
  209. cecli/sessions.py +281 -0
  210. cecli/special.py +203 -0
  211. cecli/tools/__init__.py +72 -0
  212. cecli/tools/command.py +103 -0
  213. cecli/tools/command_interactive.py +113 -0
  214. cecli/tools/context_manager.py +175 -0
  215. cecli/tools/delete_block.py +154 -0
  216. cecli/tools/delete_line.py +120 -0
  217. cecli/tools/delete_lines.py +144 -0
  218. cecli/tools/extract_lines.py +281 -0
  219. cecli/tools/finished.py +35 -0
  220. cecli/tools/git_branch.py +132 -0
  221. cecli/tools/git_diff.py +49 -0
  222. cecli/tools/git_log.py +43 -0
  223. cecli/tools/git_remote.py +39 -0
  224. cecli/tools/git_show.py +37 -0
  225. cecli/tools/git_status.py +32 -0
  226. cecli/tools/grep.py +242 -0
  227. cecli/tools/indent_lines.py +195 -0
  228. cecli/tools/insert_block.py +263 -0
  229. cecli/tools/list_changes.py +71 -0
  230. cecli/tools/load_skill.py +51 -0
  231. cecli/tools/ls.py +77 -0
  232. cecli/tools/remove_skill.py +51 -0
  233. cecli/tools/replace_all.py +113 -0
  234. cecli/tools/replace_line.py +135 -0
  235. cecli/tools/replace_lines.py +180 -0
  236. cecli/tools/replace_text.py +186 -0
  237. cecli/tools/show_numbered_context.py +137 -0
  238. cecli/tools/thinking.py +52 -0
  239. cecli/tools/undo_change.py +82 -0
  240. cecli/tools/update_todo_list.py +148 -0
  241. cecli/tools/utils/base_tool.py +64 -0
  242. cecli/tools/utils/helpers.py +359 -0
  243. cecli/tools/utils/output.py +119 -0
  244. cecli/tools/utils/registry.py +145 -0
  245. cecli/tools/view_files_matching.py +138 -0
  246. cecli/tools/view_files_with_symbol.py +117 -0
  247. cecli/tui/__init__.py +83 -0
  248. cecli/tui/app.py +971 -0
  249. cecli/tui/io.py +566 -0
  250. cecli/tui/styles.tcss +117 -0
  251. cecli/tui/widgets/__init__.py +19 -0
  252. cecli/tui/widgets/completion_bar.py +331 -0
  253. cecli/tui/widgets/file_list.py +76 -0
  254. cecli/tui/widgets/footer.py +165 -0
  255. cecli/tui/widgets/input_area.py +320 -0
  256. cecli/tui/widgets/key_hints.py +16 -0
  257. cecli/tui/widgets/output.py +354 -0
  258. cecli/tui/widgets/status_bar.py +279 -0
  259. cecli/tui/worker.py +160 -0
  260. cecli/urls.py +16 -0
  261. cecli/utils.py +499 -0
  262. cecli/versioncheck.py +90 -0
  263. cecli/voice.py +90 -0
  264. cecli/waiting.py +38 -0
  265. cecli/watch.py +316 -0
  266. cecli/watch_prompts.py +12 -0
  267. cecli/website/Gemfile +8 -0
  268. cecli/website/_includes/blame.md +162 -0
  269. cecli/website/_includes/get-started.md +22 -0
  270. cecli/website/_includes/help-tip.md +5 -0
  271. cecli/website/_includes/help.md +24 -0
  272. cecli/website/_includes/install.md +5 -0
  273. cecli/website/_includes/keys.md +4 -0
  274. cecli/website/_includes/model-warnings.md +67 -0
  275. cecli/website/_includes/multi-line.md +22 -0
  276. cecli/website/_includes/python-m-aider.md +5 -0
  277. cecli/website/_includes/recording.css +228 -0
  278. cecli/website/_includes/recording.md +34 -0
  279. cecli/website/_includes/replit-pipx.md +9 -0
  280. cecli/website/_includes/works-best.md +1 -0
  281. cecli/website/_sass/custom/custom.scss +103 -0
  282. cecli/website/docs/config/adv-model-settings.md +2498 -0
  283. cecli/website/docs/config/agent-mode.md +320 -0
  284. cecli/website/docs/config/aider_conf.md +548 -0
  285. cecli/website/docs/config/api-keys.md +90 -0
  286. cecli/website/docs/config/custom-commands.md +187 -0
  287. cecli/website/docs/config/dotenv.md +493 -0
  288. cecli/website/docs/config/editor.md +127 -0
  289. cecli/website/docs/config/mcp.md +210 -0
  290. cecli/website/docs/config/model-aliases.md +173 -0
  291. cecli/website/docs/config/options.md +890 -0
  292. cecli/website/docs/config/reasoning.md +210 -0
  293. cecli/website/docs/config/skills.md +172 -0
  294. cecli/website/docs/config/tui.md +126 -0
  295. cecli/website/docs/config.md +44 -0
  296. cecli/website/docs/faq.md +379 -0
  297. cecli/website/docs/git.md +76 -0
  298. cecli/website/docs/index.md +47 -0
  299. cecli/website/docs/install/codespaces.md +39 -0
  300. cecli/website/docs/install/docker.md +48 -0
  301. cecli/website/docs/install/optional.md +100 -0
  302. cecli/website/docs/install/replit.md +8 -0
  303. cecli/website/docs/install.md +115 -0
  304. cecli/website/docs/languages.md +264 -0
  305. cecli/website/docs/legal/contributor-agreement.md +111 -0
  306. cecli/website/docs/legal/privacy.md +104 -0
  307. cecli/website/docs/llms/anthropic.md +77 -0
  308. cecli/website/docs/llms/azure.md +48 -0
  309. cecli/website/docs/llms/bedrock.md +132 -0
  310. cecli/website/docs/llms/cohere.md +34 -0
  311. cecli/website/docs/llms/deepseek.md +32 -0
  312. cecli/website/docs/llms/gemini.md +49 -0
  313. cecli/website/docs/llms/github.md +111 -0
  314. cecli/website/docs/llms/groq.md +36 -0
  315. cecli/website/docs/llms/lm-studio.md +39 -0
  316. cecli/website/docs/llms/ollama.md +75 -0
  317. cecli/website/docs/llms/openai-compat.md +39 -0
  318. cecli/website/docs/llms/openai.md +58 -0
  319. cecli/website/docs/llms/openrouter.md +78 -0
  320. cecli/website/docs/llms/other.md +117 -0
  321. cecli/website/docs/llms/vertex.md +50 -0
  322. cecli/website/docs/llms/warnings.md +10 -0
  323. cecli/website/docs/llms/xai.md +53 -0
  324. cecli/website/docs/llms.md +54 -0
  325. cecli/website/docs/more/analytics.md +127 -0
  326. cecli/website/docs/more/edit-formats.md +116 -0
  327. cecli/website/docs/more/infinite-output.md +192 -0
  328. cecli/website/docs/more-info.md +8 -0
  329. cecli/website/docs/recordings/auto-accept-architect.md +31 -0
  330. cecli/website/docs/recordings/dont-drop-original-read-files.md +35 -0
  331. cecli/website/docs/recordings/index.md +21 -0
  332. cecli/website/docs/recordings/model-accepts-settings.md +69 -0
  333. cecli/website/docs/recordings/tree-sitter-language-pack.md +80 -0
  334. cecli/website/docs/repomap.md +112 -0
  335. cecli/website/docs/scripting.md +100 -0
  336. cecli/website/docs/sessions.md +213 -0
  337. cecli/website/docs/troubleshooting/aider-not-found.md +24 -0
  338. cecli/website/docs/troubleshooting/edit-errors.md +76 -0
  339. cecli/website/docs/troubleshooting/imports.md +62 -0
  340. cecli/website/docs/troubleshooting/models-and-keys.md +54 -0
  341. cecli/website/docs/troubleshooting/support.md +79 -0
  342. cecli/website/docs/troubleshooting/token-limits.md +96 -0
  343. cecli/website/docs/troubleshooting/warnings.md +12 -0
  344. cecli/website/docs/troubleshooting.md +11 -0
  345. cecli/website/docs/usage/browser.md +57 -0
  346. cecli/website/docs/usage/caching.md +49 -0
  347. cecli/website/docs/usage/commands.md +133 -0
  348. cecli/website/docs/usage/conventions.md +119 -0
  349. cecli/website/docs/usage/copypaste.md +136 -0
  350. cecli/website/docs/usage/images-urls.md +48 -0
  351. cecli/website/docs/usage/lint-test.md +118 -0
  352. cecli/website/docs/usage/modes.md +211 -0
  353. cecli/website/docs/usage/not-code.md +179 -0
  354. cecli/website/docs/usage/notifications.md +87 -0
  355. cecli/website/docs/usage/tips.md +79 -0
  356. cecli/website/docs/usage/tutorials.md +30 -0
  357. cecli/website/docs/usage/voice.md +121 -0
  358. cecli/website/docs/usage/watch.md +294 -0
  359. cecli/website/docs/usage.md +102 -0
  360. cecli/website/share/index.md +101 -0
  361. cecli_dev-0.93.1.dist-info/METADATA +549 -0
  362. cecli_dev-0.93.1.dist-info/RECORD +366 -0
  363. cecli_dev-0.93.1.dist-info/WHEEL +5 -0
  364. cecli_dev-0.93.1.dist-info/entry_points.txt +4 -0
  365. cecli_dev-0.93.1.dist-info/licenses/LICENSE.txt +202 -0
  366. cecli_dev-0.93.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,77 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 200
4
+ ---
5
+
6
+ # Anthropic
7
+
8
+ To work with Anthropic's models, you need to provide your
9
+ [Anthropic API key](https://docs.anthropic.com/claude/reference/getting-started-with-the-api)
10
+ either in the `ANTHROPIC_API_KEY` environment variable or
11
+ via the `--anthropic-api-key` command line switch.
12
+
13
+ First, install aider:
14
+
15
+ {% include install.md %}
16
+
17
+ Then configure your API keys:
18
+
19
+ ```
20
+ export ANTHROPIC_API_KEY=<key> # Mac/Linux
21
+ setx ANTHROPIC_API_KEY <key> # Windows, restart shell after setx
22
+ ```
23
+
24
+ Start working with aider and Anthropic on your codebase:
25
+
26
+ ```bash
27
+ # Change directory into your codebase
28
+ cd /to/your/project
29
+
30
+ # Aider uses Claude 3.7 Sonnet by default
31
+ aider
32
+
33
+ # List models available from Anthropic
34
+ aider --list-models anthropic/
35
+ ```
36
+
37
+ {: .tip }
38
+ Anthropic has very low rate limits.
39
+ You can access all the Anthropic models via
40
+ [OpenRouter](openrouter.md)
41
+ or [Google Vertex AI](vertex.md)
42
+ with more generous rate limits.
43
+
44
+ You can use `aider --model <model-name>` to use any other Anthropic model.
45
+ For example, if you want to use a specific version of Opus
46
+ you could do `aider --model claude-3-opus-20240229`.
47
+
48
+ ## Thinking tokens
49
+
50
+ Aider can work with Sonnet 3.7's new thinking tokens, but does not ask Sonnet to use
51
+ thinking tokens by default.
52
+
53
+ Enabling thinking currently requires manual configuration.
54
+ You need to add the following to your `.aider.model.settings.yml`
55
+ [model settings file](/docs/config/adv-model-settings.html#model-settings).
56
+ Adjust the `budget_tokens` value to change the target number of thinking tokens.
57
+
58
+ ```yaml
59
+ - name: anthropic/claude-3-7-sonnet-20250219
60
+ edit_format: diff
61
+ weak_model_name: anthropic/claude-3-5-haiku-20241022
62
+ use_repo_map: true
63
+ examples_as_sys_msg: true
64
+ use_temperature: false
65
+ extra_params:
66
+ extra_headers:
67
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
68
+ max_tokens: 64000
69
+ thinking:
70
+ type: enabled
71
+ budget_tokens: 32000 # Adjust this number
72
+ cache_control: true
73
+ editor_model_name: anthropic/claude-3-7-sonnet-20250219
74
+ editor_edit_format: editor-diff
75
+ ```
76
+
77
+ More streamlined support will be coming soon.
@@ -0,0 +1,48 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 500
4
+ ---
5
+
6
+ # Azure
7
+
8
+ Aider can connect to the OpenAI models on Azure.
9
+
10
+ First, install aider:
11
+
12
+ {% include install.md %}
13
+
14
+ Then configure your API keys and endpoint:
15
+
16
+ ```
17
+ # Mac/Linux:
18
+ export AZURE_API_KEY=<key>
19
+ export AZURE_API_VERSION=2024-12-01-preview
20
+ export AZURE_API_BASE=https://myendpt.openai.azure.com
21
+
22
+ # Windows
23
+ setx AZURE_API_KEY <key>
24
+ setx AZURE_API_VERSION 2024-12-01-preview
25
+ setx AZURE_API_BASE https://myendpt.openai.azure.com
26
+ # ... restart your shell after setx commands
27
+ ```
28
+
29
+ Start working with aider and Azure on your codebase:
30
+
31
+ ```bash
32
+ # Change directory into your codebase
33
+ cd /to/your/project
34
+
35
+ aider --model azure/<your_model_deployment_name>
36
+
37
+ # List models available from Azure
38
+ aider --list-models azure/
39
+ ```
40
+
41
+ Note that aider will also use environment variables
42
+ like `AZURE_OPENAI_API_xxx`.
43
+
44
+ The `aider --list-models azure/` command will list all models that aider supports through Azure, not the models that are available for the provided endpoint.
45
+
46
+ When setting the model to use with `--model azure/<your_model_deployment_name>`, `<your_model_deployment_name>` is likely just the name of the model you have deployed to the endpoint, for example `o3-mini` or `gpt-4o`. The screenshot below shows `o3-mini` and `gpt-4o` deployments in the Azure portal done under the `myendpt` resource.
47
+
48
+ ![example azure deployment](/assets/azure-deployment.png)
@@ -0,0 +1,132 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 560
4
+ ---
5
+
6
+ # Amazon Bedrock
7
+
8
+ Aider can connect to models provided by Amazon Bedrock.
9
+ To configure Aider to use the Amazon Bedrock API, you need to set up your AWS credentials.
10
+ This can be done using the AWS CLI or by setting environment variables.
11
+
12
+ ## Select a Model from Amazon Bedrock
13
+
14
+ Before you can use a model through Amazon Bedrock, you must "enable" the model under the **Model
15
+ Access** screen in the AWS Management Console.
16
+ To find the `Model ID`, open the **Model Catalog** area in the Bedrock console, select the model
17
+ you want to use, and then find the `modelId` property under the "Usage" heading.
18
+
19
+ ### Bedrock Inference Profiles
20
+
21
+ Amazon Bedrock has added support for a new feature called [cross-region "inference profiles."](https://aws.amazon.com/about-aws/whats-new/2024/09/amazon-bedrock-knowledge-bases-cross-region-inference/)
22
+ Some models hosted in Bedrock _only_ support these inference profiles.
23
+ If you're using one of these models, then you will need to use the `Inference Profile ID`
24
+ instead of the `Model ID` from the **Model Catalog** screen, in the AWS Management Console.
25
+ For example, the Claude Sonnet 3.7 model, released in February 2025, exclusively supports
26
+ inference through inference profiles. To use this model, you would use the
27
+ `us.anthropic.claude-3-7-sonnet-20250219-v1:0` Inference Profile ID.
28
+ In the Amazon Bedrock console, go to Inference and Assessment ➡️ Cross-region Inference
29
+ to find the `Inference Profile ID` value.
30
+
31
+ If you attempt to use a `Model ID` for a model that exclusively supports the Inference Profile
32
+ feature, you will receive an error message like the following:
33
+
34
+ > litellm.BadRequestError: BedrockException - b'{"message":"Invocation of model ID
35
+ anthropic.claude-3-7-sonnet-20250219-v1:0 with on-demand throughput isn\xe2\x80\x99t supported. Retry your
36
+ request with the ID or ARN of an inference profile that contains this model."}'
37
+
38
+ ## Installation and Configuration
39
+
40
+ First, install aider:
41
+
42
+ {% include install.md %}
43
+
44
+ Next, configure your AWS credentials. This can be done using the AWS CLI or by setting environment variables.
45
+
46
+ ## AWS CLI Configuration
47
+
48
+ If you haven't already, install the [AWS CLI](https://aws.amazon.com/cli/) and configure it with your credentials:
49
+
50
+ ```bash
51
+ aws configure
52
+ ```
53
+
54
+ This will prompt you to enter your AWS Access Key ID, Secret Access Key, and default region.
55
+
56
+ ## Environment Variables
57
+
58
+ You can set the following environment variables:
59
+
60
+ ```bash
61
+ export AWS_REGION=your_preferred_region
62
+
63
+ # For user authentication
64
+ export AWS_ACCESS_KEY_ID=your_access_key
65
+ export AWS_SECRET_ACCESS_KEY=your_secret_key
66
+
67
+ # For profile authentication
68
+ export AWS_PROFILE=your-profile
69
+ ```
70
+
71
+ You can add these to your
72
+ [.env file](/docs/config/dotenv.html).
73
+
74
+ ### Set Environment Variables with PowerShell
75
+
76
+ If you're using PowerShell on MacOS, Linux, or Windows, you can set the same AWS configuration environment variables with these commands.
77
+
78
+ ```pwsh
79
+ $env:AWS_ACCESS_KEY_ID = 'your_access_key'
80
+ $env:AWS_SECRET_ACCESS_KEY = 'your_secret_key'
81
+ $env:AWS_REGION = 'us-west-2' # Put whichever AWS region that you'd like, that the Bedrock service supports.
82
+ ```
83
+
84
+
85
+ ## Get Started
86
+
87
+ Once your AWS credentials are set up, you can run Aider with the `--model` command line switch, specifying the Bedrock model you want to use:
88
+
89
+ ```bash
90
+ # Change directory into your codebase
91
+ cd /to/your/project
92
+
93
+ aider --model bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0
94
+ ```
95
+
96
+ Sometimes it seems to help if you prefix the model name with "us.":
97
+
98
+ ```bash
99
+ aider --model bedrock/us.anthropic.claude-3-5-sonnet-20240620-v1:0
100
+ ```
101
+
102
+
103
+ ## Available Models
104
+
105
+ To see some models available via Bedrock, run:
106
+
107
+ ```bash
108
+ aider --list-models bedrock/
109
+ ```
110
+
111
+ Make sure you have access to these models in your AWS account before attempting to use them with Aider.
112
+
113
+ ## Install boto3
114
+ You may need to install the `boto3` package.
115
+
116
+ ```bash
117
+ # If you installed with aider-install or `uv tool`
118
+ uv tool run --from aider-chat pip install boto3
119
+
120
+ # Or with pipx...
121
+ pipx inject aider-chat boto3
122
+
123
+ # Or with pip
124
+ pip install -U boto3
125
+ ```
126
+
127
+ # More info
128
+
129
+ For more information on Amazon Bedrock and its models, refer to the [official AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html).
130
+
131
+ Also, see the
132
+ [litellm docs on Bedrock](https://litellm.vercel.app/docs/providers/bedrock).
@@ -0,0 +1,34 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 500
4
+ ---
5
+
6
+ # Cohere
7
+
8
+ Cohere offers *free* API access to their models.
9
+ Their Command-R+ model works well with aider
10
+ as a *very basic* coding assistant.
11
+ You'll need a [Cohere API key](https://dashboard.cohere.com/welcome/login).
12
+
13
+ First, install aider:
14
+
15
+ {% include install.md %}
16
+
17
+ Then configure your API keys:
18
+
19
+ ```
20
+ export COHERE_API_KEY=<key> # Mac/Linux
21
+ setx COHERE_API_KEY <key> # Windows, restart shell after setx
22
+ ```
23
+
24
+ Start working with aider and Cohere on your codebase:
25
+
26
+ ```bash
27
+ # Change directory into your codebase
28
+ cd /to/your/project
29
+
30
+ aider --model command-r-plus-08-2024
31
+
32
+ # List models available from Cohere
33
+ aider --list-models cohere_chat/
34
+ ```
@@ -0,0 +1,32 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 500
4
+ ---
5
+
6
+ # DeepSeek
7
+
8
+ Aider can connect to the DeepSeek.com API.
9
+ To work with DeepSeek's models, you need to set the `DEEPSEEK_API_KEY` environment variable with your [DeepSeek API key](https://platform.deepseek.com/api_keys).
10
+ The DeepSeek Chat V3 model has a top score on aider's code editing benchmark.
11
+
12
+ First, install aider:
13
+
14
+ {% include install.md %}
15
+
16
+ Then configure your API keys:
17
+
18
+ ```
19
+ export DEEPSEEK_API_KEY=<key> # Mac/Linux
20
+ setx DEEPSEEK_API_KEY <key> # Windows, restart shell after setx
21
+ ```
22
+
23
+ Start working with aider and DeepSeek on your codebase:
24
+
25
+ ```bash
26
+ # Change directory into your codebase
27
+ cd /to/your/project
28
+
29
+ # Use DeepSeek Chat v3
30
+ aider --model deepseek/deepseek-chat
31
+ ```
32
+
@@ -0,0 +1,49 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 300
4
+ ---
5
+
6
+ # Gemini
7
+
8
+ You'll need a [Gemini API key](https://aistudio.google.com/app/u/2/apikey).
9
+
10
+ First, install aider:
11
+
12
+ {% include install.md %}
13
+
14
+ Then configure your API keys:
15
+
16
+ ```bash
17
+ export GEMINI_API_KEY=<key> # Mac/Linux
18
+ setx GEMINI_API_KEY <key> # Windows, restart shell after setx
19
+ ```
20
+
21
+ Start working with aider and Gemini on your codebase:
22
+
23
+
24
+ ```bash
25
+ # Change directory into your codebase
26
+ cd /to/your/project
27
+
28
+ # You can run the Gemini 2.5 Pro model with this shortcut:
29
+ aider --model gemini
30
+
31
+ # You can run the Gemini 2.5 Pro Exp for free, with usage limits:
32
+ aider --model gemini-exp
33
+
34
+ # List models available from Gemini
35
+ aider --list-models gemini/
36
+ ```
37
+
38
+ You may need to install the `google-generativeai` package.
39
+
40
+ ```bash
41
+ # If you installed with aider-install or `uv tool`
42
+ uv tool run --from aider-chat pip install google-generativeai
43
+
44
+ # Or with pipx...
45
+ pipx inject aider-chat google-generativeai
46
+
47
+ # Or with pip
48
+ pip install -U google-generativeai
49
+ ```
@@ -0,0 +1,111 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 510
4
+ ---
5
+
6
+ # GitHub Copilot
7
+
8
+ Aider can connect to GitHub Copilot’s LLMs because Copilot exposes a standard **OpenAI-style**
9
+ endpoint at:
10
+
11
+ ```
12
+ https://api.githubcopilot.com
13
+ ```
14
+
15
+ First, install aider:
16
+
17
+ {% include install.md %}
18
+
19
+ ---
20
+
21
+ ## Configure your environment
22
+
23
+ ```bash
24
+ # macOS/Linux
25
+ export OPENAI_API_BASE=https://api.githubcopilot.com
26
+ export OPENAI_API_KEY=<oauth_token>
27
+
28
+ # Windows (PowerShell)
29
+ setx OPENAI_API_BASE https://api.githubcopilot.com
30
+ setx OPENAI_API_KEY <oauth_token>
31
+ # …restart the shell after setx commands
32
+ ```
33
+
34
+ ---
35
+
36
+ ### Where do I get the token?
37
+ The easiest path is to sign in to Copilot from any JetBrains IDE (PyCharm, GoLand, etc).
38
+ After you authenticate a file appears:
39
+
40
+ ```
41
+ ~/.config/github-copilot/apps.json
42
+ ```
43
+
44
+ On Windows the config can be found in:
45
+
46
+ ```
47
+ ~\AppData\Local\github-copilot\apps.json
48
+ ```
49
+
50
+ Copy the `oauth_token` value – that string is your `OPENAI_API_KEY`.
51
+
52
+ *Note:* tokens created by the Neovim **copilot.lua** plugin (old `hosts.json`) sometimes lack the
53
+ needed scopes. If you see “access to this endpoint is forbidden”, regenerate the token with a
54
+ JetBrains IDE.
55
+
56
+ ---
57
+
58
+ ## Discover available models
59
+
60
+ Copilot hosts many models (OpenAI, Anthropic, Google, etc).
61
+ List the models your subscription allows with:
62
+
63
+ ```bash
64
+ curl -s https://api.githubcopilot.com/models \
65
+ -H "Authorization: Bearer $OPENAI_API_KEY" \
66
+ -H "Content-Type: application/json" \
67
+ -H "Copilot-Integration-Id: vscode-chat" | jq -r '.data[].id'
68
+ ```
69
+
70
+ Each returned ID can be used with aider by **prefixing it with `openai/`**:
71
+
72
+ ```bash
73
+ aider --model openai/gpt-4o
74
+ # or
75
+ aider --model openai/claude-3.7-sonnet-thought
76
+ ```
77
+
78
+ ---
79
+
80
+ ## Quick start
81
+
82
+ ```bash
83
+ # change into your project
84
+ cd /to/your/project
85
+
86
+ # talk to Copilot
87
+ aider --model openai/gpt-4o
88
+ ```
89
+
90
+ ---
91
+
92
+ ## Optional config file (`~/.aider.conf.yml`)
93
+
94
+ ```yaml
95
+ openai-api-base: https://api.githubcopilot.com
96
+ openai-api-key: "<oauth_token>"
97
+ model: openai/gpt-4o
98
+ weak-model: openai/gpt-4o-mini
99
+ show-model-warnings: false
100
+ ```
101
+
102
+ ---
103
+
104
+ ## FAQ
105
+
106
+ * Calls made through aider are billed through your Copilot subscription
107
+ (aider will still print *estimated* costs).
108
+ * The Copilot docs explicitly allow third-party “agents” that hit this API – aider is playing by
109
+ the rules.
110
+ * Aider talks directly to the REST endpoint—no web-UI scraping or browser automation.
111
+
@@ -0,0 +1,36 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 400
4
+ ---
5
+
6
+ # GROQ
7
+
8
+ Groq currently offers *free* API access to the models they host.
9
+ The Llama 3 70B model works
10
+ well with aider and is comparable to GPT-3.5 in code editing performance.
11
+ You'll need a [Groq API key](https://console.groq.com/keys).
12
+
13
+ First, install aider:
14
+
15
+ {% include install.md %}
16
+
17
+ Then configure your API keys:
18
+
19
+ ```
20
+ export GROQ_API_KEY=<key> # Mac/Linux
21
+ setx GROQ_API_KEY <key> # Windows, restart shell after setx
22
+ ```
23
+
24
+ Start working with aider and Groq on your codebase:
25
+
26
+ ```bash
27
+ # Change directory into your codebase
28
+ cd /to/your/project
29
+
30
+ aider --model groq/llama3-70b-8192
31
+
32
+ # List models available from Groq
33
+ aider --list-models groq/
34
+ ```
35
+
36
+
@@ -0,0 +1,39 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 400
4
+ ---
5
+
6
+ # LM Studio
7
+
8
+ Aider can connect to models served by LM Studio.
9
+
10
+ First, install aider:
11
+
12
+ {% include install.md %}
13
+
14
+ Then configure your API key and endpoint:
15
+
16
+ ```
17
+ # Must set a value here even if it's a dummy value
18
+ export LM_STUDIO_API_KEY=dummy-api-key # Mac/Linux
19
+ setx LM_STUDIO_API_KEY dummy-api-key # Windows, restart shell after setx
20
+
21
+ # LM Studio default server URL is http://localhost:1234/v1
22
+ export LM_STUDIO_API_BASE=http://localhost:1234/v1 # Mac/Linux
23
+ setx LM_STUDIO_API_BASE http://localhost:1234/v1 # Windows, restart shell after setx
24
+ ```
25
+
26
+ **Note:** Even though LM Studio doesn't require an API key out of the box, the `LM_STUDIO_API_KEY` must have a dummy value like `dummy-api-key` set, or the client request will fail trying to send an empty `Bearer` token.
27
+
28
+ Start working with aider and LM Studio on your codebase:
29
+
30
+ ```bash
31
+ # Change directory into your codebase
32
+ cd /to/your/project
33
+
34
+ aider --model lm_studio/<your-model-name>
35
+ ```
36
+
37
+ See the [model warnings](warnings.html)
38
+ section for information on warnings which will occur
39
+ when working with models that aider is not familiar with.
@@ -0,0 +1,75 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 500
4
+ ---
5
+
6
+ # Ollama
7
+
8
+ Aider can connect to local Ollama models.
9
+
10
+ First, install aider:
11
+
12
+ {% include install.md %}
13
+
14
+ Then configure your Ollama API endpoint (usually the default):
15
+
16
+ ```bash
17
+ export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
18
+ setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows, restart shell after setx
19
+ ```
20
+
21
+ Start working with aider and Ollama on your codebase:
22
+
23
+ ```
24
+ # Pull the model
25
+ ollama pull <model>
26
+
27
+ # Start your ollama server, increasing the context window to 8k tokens
28
+ OLLAMA_CONTEXT_LENGTH=8192 ollama serve
29
+
30
+ # In another terminal window, change directory into your codebase
31
+ cd /to/your/project
32
+
33
+ aider --model ollama_chat/<model>
34
+ ```
35
+
36
+ {: .note }
37
+ Using `ollama_chat/` is recommended over `ollama/`.
38
+
39
+
40
+ See the [model warnings](warnings.html)
41
+ section for information on warnings which will occur
42
+ when working with models that aider is not familiar with.
43
+
44
+ ## API Key
45
+
46
+ If you are using an Ollama server that requires an API key, you can set `OLLAMA_API_KEY`:
47
+
48
+ ```
49
+ export OLLAMA_API_KEY=<api-key> # Mac/Linux
50
+ setx OLLAMA_API_KEY <api-key> # Windows, restart shell after setx
51
+ ```
52
+
53
+ ## Setting the context window size
54
+
55
+ [Ollama uses a 2k context window by default](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-can-i-specify-the-context-window-size),
56
+ which is very small for working with aider.
57
+ It also **silently** discards context that exceeds the window.
58
+ This is especially dangerous because many users don't even realize that most of their data
59
+ is being discarded by Ollama.
60
+
61
+ By default, aider sets Ollama's context window
62
+ to be large enough for each request you send plus 8k tokens for the reply.
63
+ This ensures data isn't silently discarded by Ollama.
64
+
65
+ If you'd like, you can configure a fixed-size context window instead
66
+ with an
67
+ [`.aider.model.settings.yml` file](https://aider.chat/docs/config/adv-model-settings.html#model-settings)
68
+ like this:
69
+
70
+ ```
71
+ - name: ollama/qwen2.5-coder:32b-instruct-fp16
72
+ extra_params:
73
+ num_ctx: 65536
74
+ ```
75
+
@@ -0,0 +1,39 @@
1
+ ---
2
+ parent: Connecting to LLMs
3
+ nav_order: 500
4
+ ---
5
+
6
+ # OpenAI compatible APIs
7
+
8
+ Aider can connect to any LLM which is accessible via an OpenAI compatible API endpoint.
9
+
10
+ First, install aider:
11
+
12
+ {% include install.md %}
13
+
14
+ Then configure your API key and endpoint:
15
+
16
+ ```
17
+ # Mac/Linux:
18
+ export OPENAI_API_BASE=<endpoint>
19
+ export OPENAI_API_KEY=<key>
20
+
21
+ # Windows:
22
+ setx OPENAI_API_BASE <endpoint>
23
+ setx OPENAI_API_KEY <key>
24
+ # ... restart shell after setx commands
25
+ ```
26
+
27
+ Start working with aider and your OpenAI compatible API on your codebase:
28
+
29
+ ```bash
30
+ # Change directory into your codebase
31
+ cd /to/your/project
32
+
33
+ # Prefix the model name with openai/
34
+ aider --model openai/<model-name>
35
+ ```
36
+
37
+ See the [model warnings](warnings.html)
38
+ section for information on warnings which will occur
39
+ when working with models that aider is not familiar with.