chatmcp-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aider/__init__.py +20 -0
- aider/__main__.py +4 -0
- aider/_version.py +21 -0
- aider/analytics.py +250 -0
- aider/args.py +926 -0
- aider/args_formatter.py +228 -0
- aider/coders/__init__.py +34 -0
- aider/coders/architect_coder.py +48 -0
- aider/coders/architect_prompts.py +40 -0
- aider/coders/ask_coder.py +9 -0
- aider/coders/ask_prompts.py +35 -0
- aider/coders/base_coder.py +2483 -0
- aider/coders/base_prompts.py +60 -0
- aider/coders/chat_chunks.py +64 -0
- aider/coders/context_coder.py +53 -0
- aider/coders/context_prompts.py +75 -0
- aider/coders/editblock_coder.py +657 -0
- aider/coders/editblock_fenced_coder.py +10 -0
- aider/coders/editblock_fenced_prompts.py +143 -0
- aider/coders/editblock_func_coder.py +141 -0
- aider/coders/editblock_func_prompts.py +27 -0
- aider/coders/editblock_prompts.py +174 -0
- aider/coders/editor_diff_fenced_coder.py +9 -0
- aider/coders/editor_diff_fenced_prompts.py +11 -0
- aider/coders/editor_editblock_coder.py +8 -0
- aider/coders/editor_editblock_prompts.py +18 -0
- aider/coders/editor_whole_coder.py +8 -0
- aider/coders/editor_whole_prompts.py +10 -0
- aider/coders/help_coder.py +16 -0
- aider/coders/help_prompts.py +46 -0
- aider/coders/patch_coder.py +706 -0
- aider/coders/patch_prompts.py +161 -0
- aider/coders/search_replace.py +757 -0
- aider/coders/shell.py +37 -0
- aider/coders/single_wholefile_func_coder.py +102 -0
- aider/coders/single_wholefile_func_prompts.py +27 -0
- aider/coders/udiff_coder.py +429 -0
- aider/coders/udiff_prompts.py +115 -0
- aider/coders/udiff_simple.py +14 -0
- aider/coders/udiff_simple_prompts.py +25 -0
- aider/coders/wholefile_coder.py +144 -0
- aider/coders/wholefile_func_coder.py +134 -0
- aider/coders/wholefile_func_prompts.py +27 -0
- aider/coders/wholefile_prompts.py +67 -0
- aider/commands.py +1665 -0
- aider/copypaste.py +72 -0
- aider/deprecated.py +126 -0
- aider/diffs.py +128 -0
- aider/dump.py +29 -0
- aider/editor.py +147 -0
- aider/exceptions.py +107 -0
- aider/format_settings.py +26 -0
- aider/gui.py +545 -0
- aider/help.py +163 -0
- aider/help_pats.py +19 -0
- aider/history.py +143 -0
- aider/io.py +1175 -0
- aider/linter.py +304 -0
- aider/llm.py +47 -0
- aider/main.py +1267 -0
- aider/mdstream.py +243 -0
- aider/models.py +1286 -0
- aider/onboarding.py +428 -0
- aider/openrouter.py +128 -0
- aider/prompts.py +64 -0
- aider/queries/tree-sitter-language-pack/README.md +7 -0
- aider/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/c-tags.scm +9 -0
- aider/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
- aider/queries/tree-sitter-language-pack/commonlisp-tags.scm +122 -0
- aider/queries/tree-sitter-language-pack/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-language-pack/csharp-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/d-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/dart-tags.scm +92 -0
- aider/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-language-pack/elm-tags.scm +19 -0
- aider/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
- aider/queries/tree-sitter-language-pack/go-tags.scm +42 -0
- aider/queries/tree-sitter-language-pack/java-tags.scm +20 -0
- aider/queries/tree-sitter-language-pack/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-language-pack/lua-tags.scm +34 -0
- aider/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
- aider/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/python-tags.scm +14 -0
- aider/queries/tree-sitter-language-pack/r-tags.scm +21 -0
- aider/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
- aider/queries/tree-sitter-language-pack/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-language-pack/rust-tags.scm +60 -0
- aider/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
- aider/queries/tree-sitter-language-pack/swift-tags.scm +51 -0
- aider/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
- aider/queries/tree-sitter-languages/README.md +23 -0
- aider/queries/tree-sitter-languages/c-tags.scm +9 -0
- aider/queries/tree-sitter-languages/c_sharp-tags.scm +46 -0
- aider/queries/tree-sitter-languages/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-languages/dart-tags.scm +91 -0
- aider/queries/tree-sitter-languages/elisp-tags.scm +8 -0
- aider/queries/tree-sitter-languages/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-languages/elm-tags.scm +19 -0
- aider/queries/tree-sitter-languages/go-tags.scm +30 -0
- aider/queries/tree-sitter-languages/hcl-tags.scm +77 -0
- aider/queries/tree-sitter-languages/java-tags.scm +20 -0
- aider/queries/tree-sitter-languages/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-languages/kotlin-tags.scm +27 -0
- aider/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-languages/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-languages/php-tags.scm +26 -0
- aider/queries/tree-sitter-languages/python-tags.scm +12 -0
- aider/queries/tree-sitter-languages/ql-tags.scm +26 -0
- aider/queries/tree-sitter-languages/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-languages/rust-tags.scm +60 -0
- aider/queries/tree-sitter-languages/scala-tags.scm +65 -0
- aider/queries/tree-sitter-languages/typescript-tags.scm +41 -0
- aider/reasoning_tags.py +82 -0
- aider/repo.py +623 -0
- aider/repomap.py +847 -0
- aider/report.py +200 -0
- aider/resources/__init__.py +3 -0
- aider/resources/model-metadata.json +468 -0
- aider/resources/model-settings.yml +1767 -0
- aider/run_cmd.py +132 -0
- aider/scrape.py +284 -0
- aider/sendchat.py +61 -0
- aider/special.py +203 -0
- aider/urls.py +17 -0
- aider/utils.py +338 -0
- aider/versioncheck.py +113 -0
- aider/voice.py +187 -0
- aider/waiting.py +221 -0
- aider/watch.py +318 -0
- aider/watch_prompts.py +12 -0
- aider/website/Gemfile +8 -0
- aider/website/_includes/blame.md +162 -0
- aider/website/_includes/get-started.md +22 -0
- aider/website/_includes/help-tip.md +5 -0
- aider/website/_includes/help.md +24 -0
- aider/website/_includes/install.md +5 -0
- aider/website/_includes/keys.md +4 -0
- aider/website/_includes/model-warnings.md +67 -0
- aider/website/_includes/multi-line.md +22 -0
- aider/website/_includes/python-m-aider.md +5 -0
- aider/website/_includes/recording.css +228 -0
- aider/website/_includes/recording.md +34 -0
- aider/website/_includes/replit-pipx.md +9 -0
- aider/website/_includes/works-best.md +1 -0
- aider/website/_sass/custom/custom.scss +103 -0
- aider/website/docs/config/adv-model-settings.md +1881 -0
- aider/website/docs/config/aider_conf.md +527 -0
- aider/website/docs/config/api-keys.md +90 -0
- aider/website/docs/config/dotenv.md +478 -0
- aider/website/docs/config/editor.md +127 -0
- aider/website/docs/config/model-aliases.md +103 -0
- aider/website/docs/config/options.md +843 -0
- aider/website/docs/config/reasoning.md +209 -0
- aider/website/docs/config.md +44 -0
- aider/website/docs/faq.md +378 -0
- aider/website/docs/git.md +76 -0
- aider/website/docs/index.md +47 -0
- aider/website/docs/install/codespaces.md +39 -0
- aider/website/docs/install/docker.md +57 -0
- aider/website/docs/install/optional.md +100 -0
- aider/website/docs/install/replit.md +8 -0
- aider/website/docs/install.md +115 -0
- aider/website/docs/languages.md +264 -0
- aider/website/docs/legal/contributor-agreement.md +111 -0
- aider/website/docs/legal/privacy.md +104 -0
- aider/website/docs/llms/anthropic.md +77 -0
- aider/website/docs/llms/azure.md +48 -0
- aider/website/docs/llms/bedrock.md +132 -0
- aider/website/docs/llms/cohere.md +34 -0
- aider/website/docs/llms/deepseek.md +32 -0
- aider/website/docs/llms/gemini.md +49 -0
- aider/website/docs/llms/github.md +105 -0
- aider/website/docs/llms/groq.md +36 -0
- aider/website/docs/llms/lm-studio.md +39 -0
- aider/website/docs/llms/ollama.md +75 -0
- aider/website/docs/llms/openai-compat.md +39 -0
- aider/website/docs/llms/openai.md +58 -0
- aider/website/docs/llms/openrouter.md +78 -0
- aider/website/docs/llms/other.md +103 -0
- aider/website/docs/llms/vertex.md +50 -0
- aider/website/docs/llms/warnings.md +10 -0
- aider/website/docs/llms/xai.md +53 -0
- aider/website/docs/llms.md +54 -0
- aider/website/docs/more/analytics.md +122 -0
- aider/website/docs/more/edit-formats.md +116 -0
- aider/website/docs/more/infinite-output.md +137 -0
- aider/website/docs/more-info.md +8 -0
- aider/website/docs/recordings/auto-accept-architect.md +31 -0
- aider/website/docs/recordings/dont-drop-original-read-files.md +35 -0
- aider/website/docs/recordings/index.md +21 -0
- aider/website/docs/recordings/model-accepts-settings.md +69 -0
- aider/website/docs/recordings/tree-sitter-language-pack.md +80 -0
- aider/website/docs/repomap.md +112 -0
- aider/website/docs/scripting.md +100 -0
- aider/website/docs/troubleshooting/aider-not-found.md +24 -0
- aider/website/docs/troubleshooting/edit-errors.md +76 -0
- aider/website/docs/troubleshooting/imports.md +62 -0
- aider/website/docs/troubleshooting/models-and-keys.md +54 -0
- aider/website/docs/troubleshooting/support.md +79 -0
- aider/website/docs/troubleshooting/token-limits.md +96 -0
- aider/website/docs/troubleshooting/warnings.md +12 -0
- aider/website/docs/troubleshooting.md +11 -0
- aider/website/docs/usage/browser.md +57 -0
- aider/website/docs/usage/caching.md +49 -0
- aider/website/docs/usage/commands.md +132 -0
- aider/website/docs/usage/conventions.md +119 -0
- aider/website/docs/usage/copypaste.md +121 -0
- aider/website/docs/usage/images-urls.md +48 -0
- aider/website/docs/usage/lint-test.md +118 -0
- aider/website/docs/usage/modes.md +211 -0
- aider/website/docs/usage/not-code.md +179 -0
- aider/website/docs/usage/notifications.md +87 -0
- aider/website/docs/usage/tips.md +79 -0
- aider/website/docs/usage/tutorials.md +30 -0
- aider/website/docs/usage/voice.md +121 -0
- aider/website/docs/usage/watch.md +294 -0
- aider/website/docs/usage.md +92 -0
- aider/website/share/index.md +101 -0
- chatmcp_cli-0.1.0.dist-info/METADATA +502 -0
- chatmcp_cli-0.1.0.dist-info/RECORD +228 -0
- chatmcp_cli-0.1.0.dist-info/WHEEL +5 -0
- chatmcp_cli-0.1.0.dist-info/entry_points.txt +3 -0
- chatmcp_cli-0.1.0.dist-info/licenses/LICENSE.txt +202 -0
- chatmcp_cli-0.1.0.dist-info/top_level.txt +1 -0
aider/report.py
ADDED
@@ -0,0 +1,200 @@
|
|
1
|
+
import os
|
2
|
+
import platform
|
3
|
+
import subprocess
|
4
|
+
import sys
|
5
|
+
import traceback
|
6
|
+
import urllib.parse
|
7
|
+
import webbrowser
|
8
|
+
|
9
|
+
from aider import __version__
|
10
|
+
from aider.urls import github_issues
|
11
|
+
from aider.versioncheck import VERSION_CHECK_FNAME
|
12
|
+
|
13
|
+
FENCE = "`" * 3
|
14
|
+
|
15
|
+
|
16
|
+
def get_python_info():
    """Summarize the Python runtime: implementation name and whether a venv is active."""
    impl_name = platform.python_implementation()
    # A virtual environment is detected when sys.prefix diverges from the base prefix.
    venv_flag = "Yes" if sys.prefix != sys.base_prefix else "No"
    return f"Python implementation: {impl_name}\nVirtual environment: {venv_flag}"
|
23
|
+
|
24
|
+
|
25
|
+
def get_os_info():
    """Return a one-line OS summary: system name, release, and pointer-size architecture."""
    system = platform.system()
    release = platform.release()
    bits = platform.architecture()[0]
    return f"OS: {system} {release} ({bits})"
|
27
|
+
|
28
|
+
|
29
|
+
def get_git_info():
    """Return the installed git version string, or a fallback message if git is missing."""
    try:
        # Any failure (git not installed, non-zero exit, decode error) yields the fallback.
        raw = subprocess.check_output(["git", "--version"])
        return f"Git version: {raw.decode().strip()}"
    except Exception:
        return "Git information unavailable"
|
35
|
+
|
36
|
+
|
37
|
+
def report_github_issue(issue_text, title=None, confirm=True):
    """
    Compose a URL to open a new GitHub issue with the given text prefilled,
    and attempt to launch it in the default web browser.

    :param issue_text: The text of the issue to file
    :param title: The title of the issue (optional)
    :param confirm: Whether to ask for confirmation before opening the browser (default: True)
    :return: None
    """
    # Collect environment details; each entry becomes one line of the report header.
    header_lines = [
        f"Aider version: {__version__}",
        f"Python version: {sys.version.split()[0]}",
        f"Platform: {platform.platform()}",
        get_python_info(),
        get_os_info(),
        get_git_info(),
    ]
    system_info = "".join(line + "\n" for line in header_lines) + "\n"

    issue_text = system_info + issue_text
    if title is None:
        title = "Bug report"
    # Insertion order (body, then title) determines the query-string order.
    params = {"body": issue_text, "title": title}
    issue_url = f"{github_issues}?{urllib.parse.urlencode(params)}"

    if confirm:
        print(f"\n# {title}\n")
        print(issue_text.strip())
        print()
        print("Please consider reporting this bug to help improve aider!")
        answer = (
            input("Open a GitHub Issue pre-filled with the above error in your browser? (Y/n) ")
            .strip()
            .lower()
        )
        # Empty input or anything starting with "y" counts as consent.
        if answer and not answer.startswith("y"):
            return

    print("Attempting to open the issue URL in your default web browser...")
    try:
        if webbrowser.open(issue_url):
            print("Browser window should be opened.")
    except Exception:
        pass  # best-effort: browser failures should never crash the reporter

    if confirm:
        print()
        print()
        print("You can also use this URL to file the GitHub Issue:")
        print()
        print(issue_url)
        print()
        print()
|
92
|
+
|
93
|
+
|
94
|
+
def exception_handler(exc_type, exc_value, exc_traceback):
    """Global excepthook: offer to file a GitHub issue for any uncaught exception.

    KeyboardInterrupt is delegated straight to the default hook. For anything
    else, the traceback is formatted with file paths reduced to basenames (to
    avoid leaking local directory layout), an issue is offered via
    report_github_issue(), and finally the default hook prints the traceback.
    """
    # If it's a KeyboardInterrupt, just call the default handler
    if issubclass(exc_type, KeyboardInterrupt):
        return sys.__excepthook__(exc_type, exc_value, exc_traceback)

    # We don't want any more exceptions: disable this hook so a crash while
    # reporting doesn't recurse back into it.
    sys.excepthook = None

    # Check if VERSION_CHECK_FNAME exists and delete it if so, forcing a fresh
    # version check on the next run.
    try:
        if VERSION_CHECK_FNAME.exists():
            VERSION_CHECK_FNAME.unlink()
    except Exception:
        pass  # Swallow any errors

    # Format the traceback
    tb_lines = traceback.format_exception(exc_type, exc_value, exc_traceback)

    # Replace full paths with basenames in the traceback
    tb_lines_with_basenames = []
    for line in tb_lines:
        try:
            if "File " in line:
                # 'File "<path>", line N' — the path is the second '"'-split piece.
                parts = line.split('"')
                if len(parts) > 1:
                    full_path = parts[1]
                    basename = os.path.basename(full_path)
                    line = line.replace(full_path, basename)
        except Exception:
            pass  # keep the original line if anything goes wrong
        tb_lines_with_basenames.append(line)

    tb_text = "".join(tb_lines_with_basenames)

    # Find the innermost frame (where the exception was actually raised)
    innermost_tb = exc_traceback
    while innermost_tb.tb_next:
        innermost_tb = innermost_tb.tb_next

    # Get the filename and line number from the innermost frame
    filename = innermost_tb.tb_frame.f_code.co_filename
    line_number = innermost_tb.tb_lineno
    try:
        basename = os.path.basename(filename)
    except Exception:
        basename = filename

    # Get the exception type name
    exception_type = exc_type.__name__

    # Prepare the issue text
    issue_text = f"An uncaught exception occurred:\n\n{FENCE}\n{tb_text}\n{FENCE}"

    # Prepare the title
    title = f"Uncaught {exception_type} in {basename} line {line_number}"

    # Report the issue
    report_github_issue(issue_text, title=title)

    # Call the default exception handler so the traceback still reaches stderr
    sys.__excepthook__(exc_type, exc_value, exc_traceback)
|
155
|
+
|
156
|
+
|
157
|
+
def report_uncaught_exceptions():
    """Install the crash-reporting handler as the process-wide excepthook.

    After this call, any uncaught exception is routed through
    exception_handler() instead of the default traceback printer.
    """
    sys.excepthook = exception_handler
|
162
|
+
|
163
|
+
|
164
|
+
def dummy_function1():
    """Raise ValueError("boo") through three nested call frames.

    Exists solely so main() can exercise the excepthook with a
    multi-frame traceback; the nesting is deliberate.
    """
    def dummy_function2():
        def dummy_function3():
            raise ValueError("boo")

        dummy_function3()

    dummy_function2()
|
172
|
+
|
173
|
+
|
174
|
+
def main():
    """CLI entry point: demo the excepthook, or file an issue from argv/stdin."""
    report_uncaught_exceptions()

    # NOTE(review): dummy_function1() raises ValueError, so the exception
    # propagates out of main() and is handled by the excepthook installed
    # above — the code below appears unreachable in a normal run. Presumably
    # left in for manual testing of the reporter; confirm before relying on it.
    dummy_function1()

    title = None
    if len(sys.argv) > 2:
        # Use the first command-line argument as the title and the second as the issue text
        title = sys.argv[1]
        issue_text = sys.argv[2]
    elif len(sys.argv) > 1:
        # Use the first command-line argument as the issue text
        issue_text = sys.argv[1]
    else:
        # Read from stdin if no argument is provided
        print("Enter the issue title (optional, press Enter to skip):")
        title = input().strip()
        if not title:
            title = None
        print("Enter the issue text (Ctrl+D to finish):")
        issue_text = sys.stdin.read().strip()

    report_github_issue(issue_text, title)
|
197
|
+
|
198
|
+
|
199
|
+
# Allow running this module directly to exercise the crash reporter.
if __name__ == "__main__":
    main()
|
@@ -0,0 +1,468 @@
|
|
1
|
+
{
|
2
|
+
"deepseek-reasoner": {
|
3
|
+
"max_tokens": 8192,
|
4
|
+
"max_input_tokens": 64000,
|
5
|
+
"max_output_tokens": 8192,
|
6
|
+
"input_cost_per_token": 0.00000055,
|
7
|
+
"input_cost_per_token_cache_hit": 0.00000014,
|
8
|
+
"cache_read_input_token_cost": 0.00000014,
|
9
|
+
"cache_creation_input_token_cost": 0.0,
|
10
|
+
"output_cost_per_token": 0.00000219,
|
11
|
+
"litellm_provider": "deepseek",
|
12
|
+
"mode": "chat",
|
13
|
+
//"supports_function_calling": true,
|
14
|
+
"supports_assistant_prefill": true,
|
15
|
+
//"supports_tool_choice": true,
|
16
|
+
"supports_prompt_caching": true
|
17
|
+
},
|
18
|
+
"openrouter/deepseek/deepseek-r1:free": {
|
19
|
+
"max_tokens": 8192,
|
20
|
+
"max_input_tokens": 64000,
|
21
|
+
"max_output_tokens": 8192,
|
22
|
+
"input_cost_per_token": 0.0,
|
23
|
+
"input_cost_per_token_cache_hit": 0.0,
|
24
|
+
"cache_read_input_token_cost": 0.00,
|
25
|
+
"cache_creation_input_token_cost": 0.0,
|
26
|
+
"output_cost_per_token": 0.0,
|
27
|
+
"litellm_provider": "openrouter",
|
28
|
+
"mode": "chat",
|
29
|
+
//"supports_function_calling": true,
|
30
|
+
"supports_assistant_prefill": true,
|
31
|
+
//"supports_tool_choice": true,
|
32
|
+
"supports_prompt_caching": true
|
33
|
+
},
|
34
|
+
"openrouter/deepseek/deepseek-chat:free": {
|
35
|
+
"max_tokens": 8192,
|
36
|
+
"max_input_tokens": 64000,
|
37
|
+
"max_output_tokens": 8192,
|
38
|
+
"input_cost_per_token": 0.0,
|
39
|
+
"input_cost_per_token_cache_hit": 0.0,
|
40
|
+
"cache_read_input_token_cost": 0.00,
|
41
|
+
"cache_creation_input_token_cost": 0.0,
|
42
|
+
"output_cost_per_token": 0.0,
|
43
|
+
"litellm_provider": "openrouter",
|
44
|
+
"mode": "chat",
|
45
|
+
//"supports_function_calling": true,
|
46
|
+
"supports_assistant_prefill": true,
|
47
|
+
//"supports_tool_choice": true,
|
48
|
+
"supports_prompt_caching": true
|
49
|
+
},
|
50
|
+
"openrouter/deepseek/deepseek-chat-v3-0324": {
|
51
|
+
"max_tokens": 8192,
|
52
|
+
"max_input_tokens": 131072,
|
53
|
+
"max_output_tokens": 8192,
|
54
|
+
"input_cost_per_token": 0.00000055,
|
55
|
+
"input_cost_per_token_cache_hit": 0.00000014,
|
56
|
+
"cache_read_input_token_cost": 0.00000014,
|
57
|
+
"cache_creation_input_token_cost": 0.0,
|
58
|
+
"output_cost_per_token": 0.00000219,
|
59
|
+
"litellm_provider": "openrouter",
|
60
|
+
"mode": "chat",
|
61
|
+
//"supports_function_calling": true,
|
62
|
+
"supports_assistant_prefill": true,
|
63
|
+
//"supports_tool_choice": true,
|
64
|
+
"supports_prompt_caching": true
|
65
|
+
},
|
66
|
+
"openrouter/deepseek/deepseek-chat-v3-0324:free": {
|
67
|
+
"max_tokens": 131072,
|
68
|
+
"max_input_tokens": 131072,
|
69
|
+
"max_output_tokens": 131072,
|
70
|
+
"input_cost_per_token": 0,
|
71
|
+
"output_cost_per_token": 0,
|
72
|
+
"litellm_provider": "openrouter",
|
73
|
+
"supports_prompt_caching": true,
|
74
|
+
"mode": "chat",
|
75
|
+
"supports_tool_choice": true
|
76
|
+
},
|
77
|
+
"fireworks_ai/accounts/fireworks/models/deepseek-r1": {
|
78
|
+
"max_tokens": 160000,
|
79
|
+
"max_input_tokens": 128000,
|
80
|
+
"max_output_tokens": 20480,
|
81
|
+
"litellm_provider": "fireworks_ai",
|
82
|
+
"input_cost_per_token": 0.000008,
|
83
|
+
"output_cost_per_token": 0.000008,
|
84
|
+
"mode": "chat",
|
85
|
+
},
|
86
|
+
"fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": {
|
87
|
+
"max_tokens": 160000,
|
88
|
+
"max_input_tokens": 100000,
|
89
|
+
"max_output_tokens": 8192,
|
90
|
+
"litellm_provider": "fireworks_ai",
|
91
|
+
"input_cost_per_token": 0.0000009,
|
92
|
+
"output_cost_per_token": 0.0000009,
|
93
|
+
"mode": "chat",
|
94
|
+
},
|
95
|
+
"openrouter/openrouter/quasar-alpha": {
|
96
|
+
"max_input_tokens": 1000000,
|
97
|
+
"max_output_tokens": 32000,
|
98
|
+
"input_cost_per_token": 0.0,
|
99
|
+
"output_cost_per_token": 0.0,
|
100
|
+
"litellm_provider": "openrouter",
|
101
|
+
"mode": "chat",
|
102
|
+
"supports_vision": true,
|
103
|
+
"supports_function_calling": true,
|
104
|
+
"supports_system_messages": true,
|
105
|
+
"supports_prompt_caching": true
|
106
|
+
},
|
107
|
+
"openrouter/openrouter/optimus-alpha": {
|
108
|
+
"max_input_tokens": 1000000,
|
109
|
+
"max_output_tokens": 32000,
|
110
|
+
"input_cost_per_token": 0.0,
|
111
|
+
"output_cost_per_token": 0.0,
|
112
|
+
"litellm_provider": "openrouter",
|
113
|
+
"mode": "chat"
|
114
|
+
},
|
115
|
+
"openrouter/openai/gpt-4o-mini": {
|
116
|
+
"max_tokens": 16384,
|
117
|
+
"max_input_tokens": 128000,
|
118
|
+
"max_output_tokens": 16384,
|
119
|
+
"input_cost_per_token": 0.00000015,
|
120
|
+
"output_cost_per_token": 0.00000060,
|
121
|
+
"input_cost_per_token_batches": 0.000000075,
|
122
|
+
"output_cost_per_token_batches": 0.00000030,
|
123
|
+
"cache_read_input_token_cost": 0.000000075,
|
124
|
+
"litellm_provider": "openrouter",
|
125
|
+
"mode": "chat",
|
126
|
+
"supports_function_calling": true,
|
127
|
+
"supports_parallel_function_calling": true,
|
128
|
+
"supports_response_schema": true,
|
129
|
+
"supports_vision": true,
|
130
|
+
"supports_prompt_caching": true,
|
131
|
+
"supports_system_messages": true
|
132
|
+
},
|
133
|
+
"anthropic/claude-3-7-sonnet-20250219": {
|
134
|
+
"max_tokens": 8192,
|
135
|
+
"max_input_tokens": 200000,
|
136
|
+
"max_output_tokens": 8192,
|
137
|
+
"input_cost_per_token": 0.000003,
|
138
|
+
"output_cost_per_token": 0.000015,
|
139
|
+
"cache_creation_input_token_cost": 0.00000375,
|
140
|
+
"cache_read_input_token_cost": 0.0000003,
|
141
|
+
"litellm_provider": "anthropic",
|
142
|
+
"mode": "chat",
|
143
|
+
"supports_function_calling": true,
|
144
|
+
"supports_vision": true,
|
145
|
+
"tool_use_system_prompt_tokens": 159,
|
146
|
+
"supports_assistant_prefill": true,
|
147
|
+
"supports_pdf_input": true,
|
148
|
+
"supports_prompt_caching": true,
|
149
|
+
"supports_response_schema": true,
|
150
|
+
"deprecation_date": "2025-10-01",
|
151
|
+
"supports_tool_choice": true
|
152
|
+
},
|
153
|
+
"openai/gpt-4.5-preview": {
|
154
|
+
"max_tokens": 16384,
|
155
|
+
"max_input_tokens": 128000,
|
156
|
+
"max_output_tokens": 16384,
|
157
|
+
"input_cost_per_token": 0.000075,
|
158
|
+
"output_cost_per_token": 0.00015,
|
159
|
+
"cache_read_input_token_cost": 0.0000375,
|
160
|
+
"litellm_provider": "openai",
|
161
|
+
"mode": "chat",
|
162
|
+
"supports_function_calling": true,
|
163
|
+
"supports_parallel_function_calling": true,
|
164
|
+
"supports_response_schema": true,
|
165
|
+
"supports_vision": true,
|
166
|
+
"supports_prompt_caching": true,
|
167
|
+
"supports_system_messages": true,
|
168
|
+
"supports_tool_choice": true
|
169
|
+
},
|
170
|
+
"gemini/gemini-2.5-pro-exp-03-25": {
|
171
|
+
"max_tokens": 8192,
|
172
|
+
"max_input_tokens": 1048576,
|
173
|
+
"max_output_tokens": 64000,
|
174
|
+
"max_images_per_prompt": 3000,
|
175
|
+
"max_videos_per_prompt": 10,
|
176
|
+
"max_video_length": 1,
|
177
|
+
"max_audio_length_hours": 8.4,
|
178
|
+
"max_audio_per_prompt": 1,
|
179
|
+
"max_pdf_size_mb": 30,
|
180
|
+
"input_cost_per_image": 0,
|
181
|
+
"input_cost_per_video_per_second": 0,
|
182
|
+
"input_cost_per_audio_per_second": 0,
|
183
|
+
"input_cost_per_token": 0,
|
184
|
+
"input_cost_per_character": 0,
|
185
|
+
"input_cost_per_token_above_128k_tokens": 0,
|
186
|
+
"input_cost_per_character_above_128k_tokens": 0,
|
187
|
+
"input_cost_per_image_above_128k_tokens": 0,
|
188
|
+
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
189
|
+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
190
|
+
"output_cost_per_token": 0,
|
191
|
+
"output_cost_per_character": 0,
|
192
|
+
"output_cost_per_token_above_128k_tokens": 0,
|
193
|
+
"output_cost_per_character_above_128k_tokens": 0,
|
194
|
+
//"litellm_provider": "vertex_ai-language-models",
|
195
|
+
"litellm_provider": "gemini",
|
196
|
+
"mode": "chat",
|
197
|
+
"supports_system_messages": true,
|
198
|
+
"supports_function_calling": true,
|
199
|
+
"supports_vision": true,
|
200
|
+
"supports_audio_input": true,
|
201
|
+
"supports_video_input": true,
|
202
|
+
"supports_pdf_input": true,
|
203
|
+
"supports_response_schema": true,
|
204
|
+
"supports_tool_choice": true,
|
205
|
+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
206
|
+
},
|
207
|
+
"vertex_ai/gemini-2.5-pro-exp-03-25": {
|
208
|
+
"max_tokens": 8192,
|
209
|
+
"max_input_tokens": 1048576,
|
210
|
+
"max_output_tokens": 64000,
|
211
|
+
"max_images_per_prompt": 3000,
|
212
|
+
"max_videos_per_prompt": 10,
|
213
|
+
"max_video_length": 1,
|
214
|
+
"max_audio_length_hours": 8.4,
|
215
|
+
"max_audio_per_prompt": 1,
|
216
|
+
"max_pdf_size_mb": 30,
|
217
|
+
"input_cost_per_image": 0,
|
218
|
+
"input_cost_per_video_per_second": 0,
|
219
|
+
"input_cost_per_audio_per_second": 0,
|
220
|
+
"input_cost_per_token": 0,
|
221
|
+
"input_cost_per_character": 0,
|
222
|
+
"input_cost_per_token_above_128k_tokens": 0,
|
223
|
+
"input_cost_per_character_above_128k_tokens": 0,
|
224
|
+
"input_cost_per_image_above_128k_tokens": 0,
|
225
|
+
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
226
|
+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
227
|
+
"output_cost_per_token": 0,
|
228
|
+
"output_cost_per_character": 0,
|
229
|
+
"output_cost_per_token_above_128k_tokens": 0,
|
230
|
+
"output_cost_per_character_above_128k_tokens": 0,
|
231
|
+
"litellm_provider": "vertex_ai-language-models",
|
232
|
+
"mode": "chat",
|
233
|
+
"supports_system_messages": true,
|
234
|
+
"supports_function_calling": true,
|
235
|
+
"supports_vision": true,
|
236
|
+
"supports_audio_input": true,
|
237
|
+
"supports_video_input": true,
|
238
|
+
"supports_pdf_input": true,
|
239
|
+
"supports_response_schema": true,
|
240
|
+
"supports_tool_choice": true,
|
241
|
+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
242
|
+
},
|
243
|
+
"vertex_ai/gemini-2.5-pro-preview-03-25": {
|
244
|
+
"max_tokens": 8192,
|
245
|
+
"max_input_tokens": 1048576,
|
246
|
+
"max_output_tokens": 64000,
|
247
|
+
"max_images_per_prompt": 3000,
|
248
|
+
"max_videos_per_prompt": 10,
|
249
|
+
"max_video_length": 1,
|
250
|
+
"max_audio_length_hours": 8.4,
|
251
|
+
"max_audio_per_prompt": 1,
|
252
|
+
"max_pdf_size_mb": 30,
|
253
|
+
"input_cost_per_image": 0,
|
254
|
+
"input_cost_per_video_per_second": 0,
|
255
|
+
"input_cost_per_audio_per_second": 0,
|
256
|
+
"input_cost_per_token": 0.00000125,
|
257
|
+
"input_cost_per_character": 0,
|
258
|
+
"input_cost_per_token_above_128k_tokens": 0,
|
259
|
+
"input_cost_per_character_above_128k_tokens": 0,
|
260
|
+
"input_cost_per_image_above_128k_tokens": 0,
|
261
|
+
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
262
|
+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
263
|
+
"output_cost_per_token": 0.000010,
|
264
|
+
"output_cost_per_character": 0,
|
265
|
+
"output_cost_per_token_above_128k_tokens": 0,
|
266
|
+
"output_cost_per_character_above_128k_tokens": 0,
|
267
|
+
"litellm_provider": "vertex_ai-language-models",
|
268
|
+
"mode": "chat",
|
269
|
+
"supports_system_messages": true,
|
270
|
+
"supports_function_calling": true,
|
271
|
+
"supports_vision": true,
|
272
|
+
"supports_audio_input": true,
|
273
|
+
"supports_video_input": true,
|
274
|
+
"supports_pdf_input": true,
|
275
|
+
"supports_response_schema": true,
|
276
|
+
"supports_tool_choice": true,
|
277
|
+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
278
|
+
},
|
279
|
+
"openrouter/google/gemini-2.5-pro-preview-03-25": {
|
280
|
+
"max_tokens": 8192,
|
281
|
+
"max_input_tokens": 1048576,
|
282
|
+
"max_output_tokens": 64000,
|
283
|
+
"max_images_per_prompt": 3000,
|
284
|
+
"max_videos_per_prompt": 10,
|
285
|
+
"max_video_length": 1,
|
286
|
+
"max_audio_length_hours": 8.4,
|
287
|
+
"max_audio_per_prompt": 1,
|
288
|
+
"max_pdf_size_mb": 30,
|
289
|
+
"input_cost_per_image": 0,
|
290
|
+
"input_cost_per_video_per_second": 0,
|
291
|
+
"input_cost_per_audio_per_second": 0,
|
292
|
+
"input_cost_per_token": 0.00000125,
|
293
|
+
"input_cost_per_character": 0,
|
294
|
+
"input_cost_per_token_above_128k_tokens": 0,
|
295
|
+
"input_cost_per_character_above_128k_tokens": 0,
|
296
|
+
"input_cost_per_image_above_128k_tokens": 0,
|
297
|
+
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
298
|
+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
299
|
+
"output_cost_per_token": 0.000010,
|
300
|
+
"output_cost_per_character": 0,
|
301
|
+
"output_cost_per_token_above_128k_tokens": 0,
|
302
|
+
"output_cost_per_character_above_128k_tokens": 0,
|
303
|
+
"litellm_provider": "vertex_ai-language-models",
|
304
|
+
"mode": "chat",
|
305
|
+
"supports_system_messages": true,
|
306
|
+
"supports_function_calling": true,
|
307
|
+
"supports_vision": true,
|
308
|
+
"supports_audio_input": true,
|
309
|
+
"supports_video_input": true,
|
310
|
+
"supports_pdf_input": true,
|
311
|
+
"supports_response_schema": true,
|
312
|
+
"supports_tool_choice": true,
|
313
|
+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
314
|
+
},
|
315
|
+
"openrouter/google/gemini-2.5-pro-exp-03-25": {
|
316
|
+
"max_tokens": 8192,
|
317
|
+
"max_input_tokens": 1048576,
|
318
|
+
"max_output_tokens": 64000,
|
319
|
+
"max_images_per_prompt": 3000,
|
320
|
+
"max_videos_per_prompt": 10,
|
321
|
+
"max_video_length": 1,
|
322
|
+
"max_audio_length_hours": 8.4,
|
323
|
+
"max_audio_per_prompt": 1,
|
324
|
+
"max_pdf_size_mb": 30,
|
325
|
+
"input_cost_per_image": 0,
|
326
|
+
"input_cost_per_video_per_second": 0,
|
327
|
+
"input_cost_per_audio_per_second": 0,
|
328
|
+
"input_cost_per_token": 0,
|
329
|
+
"input_cost_per_character": 0,
|
330
|
+
"input_cost_per_token_above_128k_tokens": 0,
|
331
|
+
"input_cost_per_character_above_128k_tokens": 0,
|
332
|
+
"input_cost_per_image_above_128k_tokens": 0,
|
333
|
+
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
334
|
+
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
335
|
+
"output_cost_per_token": 0,
|
336
|
+
"output_cost_per_character": 0,
|
337
|
+
"output_cost_per_token_above_128k_tokens": 0,
|
338
|
+
"output_cost_per_character_above_128k_tokens": 0,
|
339
|
+
"litellm_provider": "openrouter",
|
340
|
+
"mode": "chat",
|
341
|
+
"supports_system_messages": true,
|
342
|
+
"supports_function_calling": true,
|
343
|
+
"supports_vision": true,
|
344
|
+
"supports_audio_input": true,
|
345
|
+
"supports_video_input": true,
|
346
|
+
"supports_pdf_input": true,
|
347
|
+
"supports_response_schema": true,
|
348
|
+
"supports_tool_choice": true,
|
349
|
+
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
350
|
+
},
|
351
|
+
"openrouter/x-ai/grok-3-beta": {
|
352
|
+
"max_tokens": 131072,
|
353
|
+
"max_input_tokens": 131072,
|
354
|
+
"max_output_tokens": 131072,
|
355
|
+
"input_cost_per_token": 0.000003,
|
356
|
+
"output_cost_per_token": 0.000015,
|
357
|
+
"litellm_provider": "openrouter",
|
358
|
+
"mode": "chat"
|
359
|
+
},
|
360
|
+
"openrouter/x-ai/grok-3-mini-beta": {
|
361
|
+
"max_tokens": 131072,
|
362
|
+
"max_input_tokens": 131072,
|
363
|
+
"max_output_tokens": 131072,
|
364
|
+
"input_cost_per_token": 0.0000003,
|
365
|
+
"output_cost_per_token": 0.0000005,
|
366
|
+
"litellm_provider": "openrouter",
|
367
|
+
"mode": "chat"
|
368
|
+
},
|
369
|
+
"openrouter/x-ai/grok-3-fast-beta": {
|
370
|
+
"max_tokens": 131072,
|
371
|
+
"max_input_tokens": 131072,
|
372
|
+
"max_output_tokens": 131072,
|
373
|
+
"input_cost_per_token": 0.000005,
|
374
|
+
"output_cost_per_token": 0.000025,
|
375
|
+
"litellm_provider": "openrouter",
|
376
|
+
"mode": "chat"
|
377
|
+
},
|
378
|
+
"openrouter/x-ai/grok-3-mini-fast-beta": {
|
379
|
+
"max_tokens": 131072,
|
380
|
+
"max_input_tokens": 131072,
|
381
|
+
"max_output_tokens": 131072,
|
382
|
+
"input_cost_per_token": 0.0000006,
|
383
|
+
"output_cost_per_token": 0.000004,
|
384
|
+
"litellm_provider": "openrouter",
|
385
|
+
"mode": "chat"
|
386
|
+
},
|
387
|
+
"openrouter/google/gemini-2.0-flash-exp:free": {
|
388
|
+
"max_tokens": 8192,
|
389
|
+
"max_input_tokens": 1048576,
|
390
|
+
"max_output_tokens": 8192,
|
391
|
+
"max_images_per_prompt": 3000,
|
392
|
+
"max_videos_per_prompt": 10,
|
393
|
+
"max_video_length": 1,
|
394
|
+
"max_audio_length_hours": 8.4,
|
395
|
+
"max_audio_per_prompt": 1,
|
396
|
+
"max_pdf_size_mb": 30,
|
397
|
+
"litellm_provider": "openrouter",
|
398
|
+
"mode": "chat",
|
399
|
+
"supports_system_messages": true,
|
400
|
+
"supports_function_calling": true,
|
401
|
+
"supports_vision": true,
|
402
|
+
"supports_response_schema": true,
|
403
|
+
"supports_audio_output": true,
|
404
|
+
"supports_tool_choice": true
|
405
|
+
},
|
406
|
+
"gemini-2.5-pro-preview-05-06": {
|
407
|
+
"max_tokens": 65536,
|
408
|
+
"max_input_tokens": 1048576,
|
409
|
+
"max_output_tokens": 65536,
|
410
|
+
"max_images_per_prompt": 3000,
|
411
|
+
"max_videos_per_prompt": 10,
|
412
|
+
"max_video_length": 1,
|
413
|
+
"max_audio_length_hours": 8.4,
|
414
|
+
"max_audio_per_prompt": 1,
|
415
|
+
"max_pdf_size_mb": 30,
|
416
|
+
"input_cost_per_audio_token": 0.00000125,
|
417
|
+
"input_cost_per_token": 0.00000125,
|
418
|
+
"input_cost_per_token_above_200k_tokens": 0.0000025,
|
419
|
+
"output_cost_per_token": 0.00001,
|
420
|
+
"output_cost_per_token_above_200k_tokens": 0.000015,
|
421
|
+
"litellm_provider": "vertex_ai-language-models",
|
422
|
+
"mode": "chat",
|
423
|
+
"supports_reasoning": true,
|
424
|
+
"supports_system_messages": true,
|
425
|
+
"supports_function_calling": true,
|
426
|
+
"supports_vision": true,
|
427
|
+
"supports_response_schema": true,
|
428
|
+
"supports_audio_output": false,
|
429
|
+
"supports_tool_choice": true,
|
430
|
+
"supported_endpoints": ["/v1/chat/completions", "/v1/completions", "/v1/batch"],
|
431
|
+
"supported_modalities": ["text", "image", "audio", "video"],
|
432
|
+
"supported_output_modalities": ["text"],
|
433
|
+
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-pro-preview"
|
434
|
+
},
|
435
|
+
"gemini/gemini-2.5-pro-preview-05-06": {
|
436
|
+
"max_tokens": 65536,
|
437
|
+
"max_input_tokens": 1048576,
|
438
|
+
"max_output_tokens": 65536,
|
439
|
+
"max_images_per_prompt": 3000,
|
440
|
+
"max_videos_per_prompt": 10,
|
441
|
+
"max_video_length": 1,
|
442
|
+
"max_audio_length_hours": 8.4,
|
443
|
+
"max_audio_per_prompt": 1,
|
444
|
+
"max_pdf_size_mb": 30,
|
445
|
+
"input_cost_per_audio_token": 0.0000007,
|
446
|
+
"input_cost_per_token": 0.00000125,
|
447
|
+
"input_cost_per_token_above_200k_tokens": 0.0000025,
|
448
|
+
"output_cost_per_token": 0.00001,
|
449
|
+
"output_cost_per_token_above_200k_tokens": 0.000015,
|
450
|
+
"litellm_provider": "gemini",
|
451
|
+
"mode": "chat",
|
452
|
+
"rpm": 10000,
|
453
|
+
"tpm": 10000000,
|
454
|
+
"supports_system_messages": true,
|
455
|
+
"supports_function_calling": true,
|
456
|
+
"supports_vision": true,
|
457
|
+
"supports_response_schema": true,
|
458
|
+
"supports_audio_output": false,
|
459
|
+
"supports_tool_choice": true,
|
460
|
+
"supported_modalities": ["text", "image", "audio", "video"],
|
461
|
+
"supported_output_modalities": ["text"],
|
462
|
+
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview"
|
463
|
+
},
|
464
|
+
"together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": {
|
465
|
+
"input_cost_per_token": 0.0000002,
|
466
|
+
"output_cost_per_token": 0.0000006
|
467
|
+
}
|
468
|
+
}
|