cecli-dev 0.95.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cecli/__init__.py +20 -0
- cecli/__main__.py +4 -0
- cecli/_version.py +34 -0
- cecli/args.py +1092 -0
- cecli/args_formatter.py +228 -0
- cecli/change_tracker.py +133 -0
- cecli/coders/__init__.py +38 -0
- cecli/coders/agent_coder.py +1872 -0
- cecli/coders/architect_coder.py +63 -0
- cecli/coders/ask_coder.py +8 -0
- cecli/coders/base_coder.py +3993 -0
- cecli/coders/chat_chunks.py +116 -0
- cecli/coders/context_coder.py +52 -0
- cecli/coders/copypaste_coder.py +269 -0
- cecli/coders/editblock_coder.py +656 -0
- cecli/coders/editblock_fenced_coder.py +9 -0
- cecli/coders/editblock_func_coder.py +140 -0
- cecli/coders/editor_diff_fenced_coder.py +8 -0
- cecli/coders/editor_editblock_coder.py +8 -0
- cecli/coders/editor_whole_coder.py +8 -0
- cecli/coders/help_coder.py +15 -0
- cecli/coders/patch_coder.py +705 -0
- cecli/coders/search_replace.py +757 -0
- cecli/coders/shell.py +37 -0
- cecli/coders/single_wholefile_func_coder.py +101 -0
- cecli/coders/udiff_coder.py +428 -0
- cecli/coders/udiff_simple.py +12 -0
- cecli/coders/wholefile_coder.py +143 -0
- cecli/coders/wholefile_func_coder.py +133 -0
- cecli/commands/__init__.py +192 -0
- cecli/commands/add.py +226 -0
- cecli/commands/agent.py +51 -0
- cecli/commands/architect.py +46 -0
- cecli/commands/ask.py +44 -0
- cecli/commands/chat_mode.py +0 -0
- cecli/commands/clear.py +37 -0
- cecli/commands/code.py +46 -0
- cecli/commands/command_prefix.py +44 -0
- cecli/commands/commit.py +52 -0
- cecli/commands/context.py +47 -0
- cecli/commands/context_blocks.py +124 -0
- cecli/commands/context_management.py +51 -0
- cecli/commands/copy.py +62 -0
- cecli/commands/copy_context.py +81 -0
- cecli/commands/core.py +287 -0
- cecli/commands/diff.py +68 -0
- cecli/commands/drop.py +217 -0
- cecli/commands/editor.py +78 -0
- cecli/commands/exit.py +55 -0
- cecli/commands/git.py +57 -0
- cecli/commands/help.py +140 -0
- cecli/commands/history_search.py +40 -0
- cecli/commands/lint.py +109 -0
- cecli/commands/list_sessions.py +56 -0
- cecli/commands/load.py +85 -0
- cecli/commands/load_session.py +48 -0
- cecli/commands/load_skill.py +68 -0
- cecli/commands/ls.py +75 -0
- cecli/commands/map.py +37 -0
- cecli/commands/map_refresh.py +35 -0
- cecli/commands/model.py +118 -0
- cecli/commands/models.py +41 -0
- cecli/commands/multiline_mode.py +38 -0
- cecli/commands/paste.py +91 -0
- cecli/commands/quit.py +32 -0
- cecli/commands/read_only.py +267 -0
- cecli/commands/read_only_stub.py +270 -0
- cecli/commands/reasoning_effort.py +70 -0
- cecli/commands/remove_skill.py +68 -0
- cecli/commands/report.py +40 -0
- cecli/commands/reset.py +88 -0
- cecli/commands/run.py +99 -0
- cecli/commands/save.py +49 -0
- cecli/commands/save_session.py +43 -0
- cecli/commands/settings.py +69 -0
- cecli/commands/test.py +58 -0
- cecli/commands/think_tokens.py +74 -0
- cecli/commands/tokens.py +207 -0
- cecli/commands/undo.py +145 -0
- cecli/commands/utils/__init__.py +0 -0
- cecli/commands/utils/base_command.py +131 -0
- cecli/commands/utils/helpers.py +142 -0
- cecli/commands/utils/registry.py +53 -0
- cecli/commands/utils/save_load_manager.py +98 -0
- cecli/commands/voice.py +78 -0
- cecli/commands/weak_model.py +123 -0
- cecli/commands/web.py +87 -0
- cecli/deprecated_args.py +185 -0
- cecli/diffs.py +129 -0
- cecli/dump.py +29 -0
- cecli/editor.py +147 -0
- cecli/exceptions.py +115 -0
- cecli/format_settings.py +26 -0
- cecli/help.py +119 -0
- cecli/help_pats.py +19 -0
- cecli/helpers/__init__.py +9 -0
- cecli/helpers/copypaste.py +123 -0
- cecli/helpers/coroutines.py +8 -0
- cecli/helpers/file_searcher.py +142 -0
- cecli/helpers/model_providers.py +552 -0
- cecli/helpers/plugin_manager.py +81 -0
- cecli/helpers/profiler.py +162 -0
- cecli/helpers/requests.py +77 -0
- cecli/helpers/similarity.py +98 -0
- cecli/helpers/skills.py +577 -0
- cecli/history.py +186 -0
- cecli/io.py +1782 -0
- cecli/linter.py +304 -0
- cecli/llm.py +101 -0
- cecli/main.py +1280 -0
- cecli/mcp/__init__.py +154 -0
- cecli/mcp/oauth.py +250 -0
- cecli/mcp/server.py +278 -0
- cecli/mdstream.py +243 -0
- cecli/models.py +1255 -0
- cecli/onboarding.py +301 -0
- cecli/prompts/__init__.py +0 -0
- cecli/prompts/agent.yml +71 -0
- cecli/prompts/architect.yml +35 -0
- cecli/prompts/ask.yml +31 -0
- cecli/prompts/base.yml +99 -0
- cecli/prompts/context.yml +60 -0
- cecli/prompts/copypaste.yml +5 -0
- cecli/prompts/editblock.yml +143 -0
- cecli/prompts/editblock_fenced.yml +106 -0
- cecli/prompts/editblock_func.yml +25 -0
- cecli/prompts/editor_diff_fenced.yml +115 -0
- cecli/prompts/editor_editblock.yml +121 -0
- cecli/prompts/editor_whole.yml +46 -0
- cecli/prompts/help.yml +37 -0
- cecli/prompts/patch.yml +110 -0
- cecli/prompts/single_wholefile_func.yml +24 -0
- cecli/prompts/udiff.yml +106 -0
- cecli/prompts/udiff_simple.yml +13 -0
- cecli/prompts/utils/__init__.py +0 -0
- cecli/prompts/utils/prompt_registry.py +167 -0
- cecli/prompts/utils/system.py +56 -0
- cecli/prompts/wholefile.yml +50 -0
- cecli/prompts/wholefile_func.yml +24 -0
- cecli/queries/tree-sitter-language-pack/README.md +7 -0
- cecli/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/c-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
- cecli/queries/tree-sitter-language-pack/clojure-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/commonlisp-tags.scm +127 -0
- cecli/queries/tree-sitter-language-pack/cpp-tags.scm +18 -0
- cecli/queries/tree-sitter-language-pack/csharp-tags.scm +32 -0
- cecli/queries/tree-sitter-language-pack/d-tags.scm +26 -0
- cecli/queries/tree-sitter-language-pack/dart-tags.scm +97 -0
- cecli/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/elixir-tags.scm +59 -0
- cecli/queries/tree-sitter-language-pack/elm-tags.scm +22 -0
- cecli/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
- cecli/queries/tree-sitter-language-pack/go-tags.scm +49 -0
- cecli/queries/tree-sitter-language-pack/java-tags.scm +26 -0
- cecli/queries/tree-sitter-language-pack/javascript-tags.scm +96 -0
- cecli/queries/tree-sitter-language-pack/lua-tags.scm +39 -0
- cecli/queries/tree-sitter-language-pack/matlab-tags.scm +10 -0
- cecli/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
- cecli/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +101 -0
- cecli/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
- cecli/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/python-tags.scm +24 -0
- cecli/queries/tree-sitter-language-pack/r-tags.scm +27 -0
- cecli/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/ruby-tags.scm +69 -0
- cecli/queries/tree-sitter-language-pack/rust-tags.scm +63 -0
- cecli/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
- cecli/queries/tree-sitter-language-pack/swift-tags.scm +54 -0
- cecli/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
- cecli/queries/tree-sitter-languages/README.md +24 -0
- cecli/queries/tree-sitter-languages/c-tags.scm +12 -0
- cecli/queries/tree-sitter-languages/c_sharp-tags.scm +52 -0
- cecli/queries/tree-sitter-languages/cpp-tags.scm +18 -0
- cecli/queries/tree-sitter-languages/dart-tags.scm +92 -0
- cecli/queries/tree-sitter-languages/elisp-tags.scm +8 -0
- cecli/queries/tree-sitter-languages/elixir-tags.scm +59 -0
- cecli/queries/tree-sitter-languages/elm-tags.scm +22 -0
- cecli/queries/tree-sitter-languages/fortran-tags.scm +18 -0
- cecli/queries/tree-sitter-languages/go-tags.scm +36 -0
- cecli/queries/tree-sitter-languages/haskell-tags.scm +5 -0
- cecli/queries/tree-sitter-languages/hcl-tags.scm +77 -0
- cecli/queries/tree-sitter-languages/java-tags.scm +26 -0
- cecli/queries/tree-sitter-languages/javascript-tags.scm +96 -0
- cecli/queries/tree-sitter-languages/julia-tags.scm +60 -0
- cecli/queries/tree-sitter-languages/kotlin-tags.scm +30 -0
- cecli/queries/tree-sitter-languages/matlab-tags.scm +10 -0
- cecli/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
- cecli/queries/tree-sitter-languages/ocaml_interface-tags.scm +104 -0
- cecli/queries/tree-sitter-languages/php-tags.scm +32 -0
- cecli/queries/tree-sitter-languages/python-tags.scm +22 -0
- cecli/queries/tree-sitter-languages/ql-tags.scm +26 -0
- cecli/queries/tree-sitter-languages/ruby-tags.scm +69 -0
- cecli/queries/tree-sitter-languages/rust-tags.scm +63 -0
- cecli/queries/tree-sitter-languages/scala-tags.scm +64 -0
- cecli/queries/tree-sitter-languages/typescript-tags.scm +44 -0
- cecli/queries/tree-sitter-languages/zig-tags.scm +20 -0
- cecli/reasoning_tags.py +82 -0
- cecli/repo.py +626 -0
- cecli/repomap.py +1368 -0
- cecli/report.py +260 -0
- cecli/resources/__init__.py +3 -0
- cecli/resources/model-metadata.json +25751 -0
- cecli/resources/model-settings.yml +2394 -0
- cecli/resources/providers.json +67 -0
- cecli/run_cmd.py +143 -0
- cecli/scrape.py +295 -0
- cecli/sendchat.py +250 -0
- cecli/sessions.py +281 -0
- cecli/special.py +203 -0
- cecli/tools/__init__.py +72 -0
- cecli/tools/command.py +103 -0
- cecli/tools/command_interactive.py +113 -0
- cecli/tools/context_manager.py +175 -0
- cecli/tools/delete_block.py +154 -0
- cecli/tools/delete_line.py +120 -0
- cecli/tools/delete_lines.py +144 -0
- cecli/tools/extract_lines.py +281 -0
- cecli/tools/finished.py +35 -0
- cecli/tools/git_branch.py +132 -0
- cecli/tools/git_diff.py +49 -0
- cecli/tools/git_log.py +43 -0
- cecli/tools/git_remote.py +39 -0
- cecli/tools/git_show.py +37 -0
- cecli/tools/git_status.py +32 -0
- cecli/tools/grep.py +242 -0
- cecli/tools/indent_lines.py +195 -0
- cecli/tools/insert_block.py +263 -0
- cecli/tools/list_changes.py +71 -0
- cecli/tools/load_skill.py +51 -0
- cecli/tools/ls.py +77 -0
- cecli/tools/remove_skill.py +51 -0
- cecli/tools/replace_all.py +113 -0
- cecli/tools/replace_line.py +135 -0
- cecli/tools/replace_lines.py +180 -0
- cecli/tools/replace_text.py +186 -0
- cecli/tools/show_numbered_context.py +137 -0
- cecli/tools/thinking.py +52 -0
- cecli/tools/undo_change.py +82 -0
- cecli/tools/update_todo_list.py +148 -0
- cecli/tools/utils/base_tool.py +64 -0
- cecli/tools/utils/helpers.py +359 -0
- cecli/tools/utils/output.py +119 -0
- cecli/tools/utils/registry.py +145 -0
- cecli/tools/view_files_matching.py +138 -0
- cecli/tools/view_files_with_symbol.py +117 -0
- cecli/tui/__init__.py +83 -0
- cecli/tui/app.py +971 -0
- cecli/tui/io.py +566 -0
- cecli/tui/styles.tcss +117 -0
- cecli/tui/widgets/__init__.py +19 -0
- cecli/tui/widgets/completion_bar.py +331 -0
- cecli/tui/widgets/file_list.py +76 -0
- cecli/tui/widgets/footer.py +165 -0
- cecli/tui/widgets/input_area.py +320 -0
- cecli/tui/widgets/key_hints.py +16 -0
- cecli/tui/widgets/output.py +354 -0
- cecli/tui/widgets/status_bar.py +279 -0
- cecli/tui/worker.py +160 -0
- cecli/urls.py +16 -0
- cecli/utils.py +499 -0
- cecli/versioncheck.py +90 -0
- cecli/voice.py +90 -0
- cecli/waiting.py +38 -0
- cecli/watch.py +316 -0
- cecli/watch_prompts.py +12 -0
- cecli/website/Gemfile +8 -0
- cecli/website/_includes/blame.md +162 -0
- cecli/website/_includes/get-started.md +22 -0
- cecli/website/_includes/help-tip.md +5 -0
- cecli/website/_includes/help.md +24 -0
- cecli/website/_includes/install.md +5 -0
- cecli/website/_includes/keys.md +4 -0
- cecli/website/_includes/model-warnings.md +67 -0
- cecli/website/_includes/multi-line.md +22 -0
- cecli/website/_includes/python-m-aider.md +5 -0
- cecli/website/_includes/recording.css +228 -0
- cecli/website/_includes/recording.md +34 -0
- cecli/website/_includes/replit-pipx.md +9 -0
- cecli/website/_includes/works-best.md +1 -0
- cecli/website/_sass/custom/custom.scss +103 -0
- cecli/website/docs/config/adv-model-settings.md +2498 -0
- cecli/website/docs/config/agent-mode.md +320 -0
- cecli/website/docs/config/aider_conf.md +548 -0
- cecli/website/docs/config/api-keys.md +90 -0
- cecli/website/docs/config/custom-commands.md +187 -0
- cecli/website/docs/config/dotenv.md +493 -0
- cecli/website/docs/config/editor.md +127 -0
- cecli/website/docs/config/mcp.md +210 -0
- cecli/website/docs/config/model-aliases.md +173 -0
- cecli/website/docs/config/options.md +890 -0
- cecli/website/docs/config/reasoning.md +210 -0
- cecli/website/docs/config/skills.md +172 -0
- cecli/website/docs/config/tui.md +126 -0
- cecli/website/docs/config.md +44 -0
- cecli/website/docs/faq.md +379 -0
- cecli/website/docs/git.md +76 -0
- cecli/website/docs/index.md +47 -0
- cecli/website/docs/install/codespaces.md +39 -0
- cecli/website/docs/install/docker.md +48 -0
- cecli/website/docs/install/optional.md +100 -0
- cecli/website/docs/install/replit.md +8 -0
- cecli/website/docs/install.md +115 -0
- cecli/website/docs/languages.md +264 -0
- cecli/website/docs/legal/contributor-agreement.md +111 -0
- cecli/website/docs/legal/privacy.md +104 -0
- cecli/website/docs/llms/anthropic.md +77 -0
- cecli/website/docs/llms/azure.md +48 -0
- cecli/website/docs/llms/bedrock.md +132 -0
- cecli/website/docs/llms/cohere.md +34 -0
- cecli/website/docs/llms/deepseek.md +32 -0
- cecli/website/docs/llms/gemini.md +49 -0
- cecli/website/docs/llms/github.md +111 -0
- cecli/website/docs/llms/groq.md +36 -0
- cecli/website/docs/llms/lm-studio.md +39 -0
- cecli/website/docs/llms/ollama.md +75 -0
- cecli/website/docs/llms/openai-compat.md +39 -0
- cecli/website/docs/llms/openai.md +58 -0
- cecli/website/docs/llms/openrouter.md +78 -0
- cecli/website/docs/llms/other.md +117 -0
- cecli/website/docs/llms/vertex.md +50 -0
- cecli/website/docs/llms/warnings.md +10 -0
- cecli/website/docs/llms/xai.md +53 -0
- cecli/website/docs/llms.md +54 -0
- cecli/website/docs/more/analytics.md +127 -0
- cecli/website/docs/more/edit-formats.md +116 -0
- cecli/website/docs/more/infinite-output.md +192 -0
- cecli/website/docs/more-info.md +8 -0
- cecli/website/docs/recordings/auto-accept-architect.md +31 -0
- cecli/website/docs/recordings/dont-drop-original-read-files.md +35 -0
- cecli/website/docs/recordings/index.md +21 -0
- cecli/website/docs/recordings/model-accepts-settings.md +69 -0
- cecli/website/docs/recordings/tree-sitter-language-pack.md +80 -0
- cecli/website/docs/repomap.md +112 -0
- cecli/website/docs/scripting.md +100 -0
- cecli/website/docs/sessions.md +213 -0
- cecli/website/docs/troubleshooting/aider-not-found.md +24 -0
- cecli/website/docs/troubleshooting/edit-errors.md +76 -0
- cecli/website/docs/troubleshooting/imports.md +62 -0
- cecli/website/docs/troubleshooting/models-and-keys.md +54 -0
- cecli/website/docs/troubleshooting/support.md +79 -0
- cecli/website/docs/troubleshooting/token-limits.md +96 -0
- cecli/website/docs/troubleshooting/warnings.md +12 -0
- cecli/website/docs/troubleshooting.md +11 -0
- cecli/website/docs/usage/browser.md +57 -0
- cecli/website/docs/usage/caching.md +49 -0
- cecli/website/docs/usage/commands.md +133 -0
- cecli/website/docs/usage/conventions.md +119 -0
- cecli/website/docs/usage/copypaste.md +136 -0
- cecli/website/docs/usage/images-urls.md +48 -0
- cecli/website/docs/usage/lint-test.md +118 -0
- cecli/website/docs/usage/modes.md +211 -0
- cecli/website/docs/usage/not-code.md +179 -0
- cecli/website/docs/usage/notifications.md +87 -0
- cecli/website/docs/usage/tips.md +79 -0
- cecli/website/docs/usage/tutorials.md +30 -0
- cecli/website/docs/usage/voice.md +121 -0
- cecli/website/docs/usage/watch.md +294 -0
- cecli/website/docs/usage.md +102 -0
- cecli/website/share/index.md +101 -0
- cecli_dev-0.95.5.dist-info/METADATA +549 -0
- cecli_dev-0.95.5.dist-info/RECORD +366 -0
- cecli_dev-0.95.5.dist-info/WHEEL +5 -0
- cecli_dev-0.95.5.dist-info/entry_points.txt +4 -0
- cecli_dev-0.95.5.dist-info/licenses/LICENSE.txt +202 -0
- cecli_dev-0.95.5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"""
|
|
2
|
+
File search utilities for aider.
|
|
3
|
+
|
|
4
|
+
This module provides functions for searching and resolving file paths
|
|
5
|
+
relative to various directories (git root, home folder, .aider, .cecli, etc.).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import List, Optional
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def generate_search_path_list(
    default_file: str, git_root: Optional[str], command_line_file: Optional[str]
) -> List[str]:
    """
    Generate a list of file paths to search for configuration files.

    The search order is:

    1. Home directory (~/default_file)
    2. Git root directory (git_root/default_file) if git_root is provided
    3. Current directory (default_file)
    4. Command line specified file (command_line_file) if provided

    Duplicate resolved paths are collapsed keeping the *last* occurrence,
    so later (higher-priority) locations retain their position in the list.

    Args:
        default_file: The default filename to search for
        git_root: The git root directory (optional)
        command_line_file: A file specified on the command line (optional)

    Returns:
        List of resolved file paths in search order (first to last)
    """
    candidates = [Path.home() / default_file]  # homedir
    if git_root:
        candidates.append(Path(git_root) / default_file)  # git root
    candidates.append(default_file)  # current directory
    if command_line_file:
        candidates.append(command_line_file)

    resolved = []
    for fn in candidates:
        try:
            resolved.append(Path(fn).expanduser().resolve())
        except OSError:
            # Paths that cannot be resolved (bad characters, etc.) are skipped.
            pass

    # Deduplicate keeping the LAST occurrence of each path: dict.fromkeys
    # keeps the first occurrence, so dedupe the reversed list, then restore
    # the original orientation.
    deduped = list(dict.fromkeys(reversed(resolved)))
    deduped.reverse()

    # Stringify and dedupe once more defensively (keeps first occurrence;
    # a no-op unless distinct Path objects stringify identically).
    return list(dict.fromkeys(map(str, deduped)))
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def handle_core_files(
    file_path: str,
    prepend_folder: Optional[str] = None,
    namespace_folder: bool = False,
) -> str:
    """
    Handle core configuration files, migrating from .aider to .cecli if needed.

    Given a path using the new ``.cecli`` naming, this function:

    1. Derives the corresponding legacy ``.aider`` path
    2. If the legacy version exists (and the ``.cecli`` one does not),
       copies it over — the original is kept in place as a backup
    3. Returns the ``.cecli`` path either way

    Both naming shapes are supported: files starting with ``.cecli``
    (e.g. ``.cecli-config.yml``) and ``.cecli`` folders in the path
    (e.g. ``.cecli/config.yml``).

    Args:
        file_path: The target file path with .cecli naming (str or PurePath)
        prepend_folder: Optional folder to prepend to the path (e.g. 'configs/')
        namespace_folder: If True, prepend '.cecli/' to the path (default: False)

    Returns:
        The processed .cecli file path; a Path is returned when a Path was
        passed in, otherwise a string.

    Example:
        >>> handle_core_files(".cecli/config.yml", "configs", True)
        ".cecli/configs/.cecli/config.yml"
        # Any existing .aider counterpart is copied to the returned location

        >>> handle_core_files(".cecli-settings.json")
        ".cecli-settings.json"
    """
    import shutil
    from pathlib import Path, PurePath

    # Remember whether the caller passed a Path so we can mirror the type.
    wants_path = isinstance(file_path, PurePath)

    target = Path(file_path)
    if prepend_folder:
        target = Path(prepend_folder) / target
    if namespace_folder:
        target = Path(".cecli") / target

    target_str = str(target)
    if ".cecli" in target_str:
        # Derive the legacy path: folder form first (.cecli/), then the
        # first remaining file-name occurrence (.cecli-...).
        legacy = Path(
            target_str.replace(".cecli/", ".aider/").replace(".cecli", ".aider", 1)
        )

        # Migrate only when the legacy version exists and the new one doesn't.
        if legacy.exists() and not target.exists():
            target.parent.mkdir(parents=True, exist_ok=True)
            try:
                if legacy.is_file():
                    # Preserve metadata (timestamps, permissions) on copy.
                    shutil.copy2(str(legacy), str(target))
                elif legacy.is_dir():
                    shutil.copytree(str(legacy), str(target), dirs_exist_ok=True)
            except (OSError, shutil.Error) as e:
                # Best-effort migration: log and fall through to the new path.
                import logging

                logging.debug(f"Failed to copy {legacy} to {target}: {e}")

    return target if wants_path else str(target)
|
|
@@ -0,0 +1,552 @@
|
|
|
1
|
+
"""Unified model provider metadata caching and lookup.
|
|
2
|
+
|
|
3
|
+
Historically cecli kept separate modules per provider (OpenRouter vs OpenAI-like).
|
|
4
|
+
Those grew unwieldy and duplicated caching, request, and normalization logic.
|
|
5
|
+
This helper centralizes that behavior so every OpenAI-compatible endpoint defines
|
|
6
|
+
a small config blob and inherits the same cache + LiteLLM registration plumbing.
|
|
7
|
+
Provider configs remain curated via ``scripts/generate_providers.py`` and the
|
|
8
|
+
static per-model fallback metadata is still cleaned up with ``clean_metadata.py``.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import importlib.resources as importlib_resources
|
|
14
|
+
import json
|
|
15
|
+
import os
|
|
16
|
+
import re
|
|
17
|
+
import time
|
|
18
|
+
from copy import deepcopy
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from typing import Dict, Optional
|
|
21
|
+
|
|
22
|
+
import requests
|
|
23
|
+
|
|
24
|
+
from cecli.helpers.file_searcher import handle_core_files
|
|
25
|
+
|
|
26
|
+
try:
|
|
27
|
+
from litellm.llms.custom_httpx.http_handler import HTTPHandler
|
|
28
|
+
from litellm.llms.custom_llm import CustomLLM, CustomLLMError
|
|
29
|
+
from litellm.llms.openai_like.chat.handler import OpenAILikeChatHandler
|
|
30
|
+
except Exception:
|
|
31
|
+
CustomLLM = None
|
|
32
|
+
CustomLLMError = Exception
|
|
33
|
+
OpenAILikeChatHandler = None
|
|
34
|
+
HTTPHandler = None
|
|
35
|
+
# Name of the packaged JSON resource holding curated provider config overrides.
RESOURCE_FILE = "providers.json"
# Module-level flag tracking whether providers were registered with LiteLLM
# (toggled elsewhere in this module).
_PROVIDERS_REGISTERED = False
# Cache of custom handler instances, keyed by provider slug, so repeated
# registration reuses a single handler per provider.
_CUSTOM_HANDLERS: Dict[str, "_JSONOpenAIProvider"] = {}
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _coerce_str(value):
|
|
41
|
+
"""Return the first string representation that litellm expects."""
|
|
42
|
+
if isinstance(value, str):
|
|
43
|
+
return value
|
|
44
|
+
if isinstance(value, list) and value:
|
|
45
|
+
return value[0]
|
|
46
|
+
return None
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _first_env_value(names):
|
|
50
|
+
"""Return the first non-empty environment variable for the provided names."""
|
|
51
|
+
if not names:
|
|
52
|
+
return None
|
|
53
|
+
if isinstance(names, str):
|
|
54
|
+
names = [names]
|
|
55
|
+
for env_name in names or []:
|
|
56
|
+
if not env_name:
|
|
57
|
+
continue
|
|
58
|
+
val = os.environ.get(env_name)
|
|
59
|
+
if val:
|
|
60
|
+
return val
|
|
61
|
+
return None
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class _JSONOpenAIProvider(OpenAILikeChatHandler if OpenAILikeChatHandler is not None else object):
    """OpenAILikeChatHandler subclass routing OpenAI-compatible providers through LiteLLM.

    When litellm is unavailable the module-level fallback sets
    ``OpenAILikeChatHandler`` to ``None``; inheriting from ``object`` in that
    case keeps this module importable, and ``__init__`` refuses to construct.
    """

    def __init__(self, slug: str, config: Dict):
        # Guard construction when litellm's handler machinery is missing.
        if CustomLLM is None or OpenAILikeChatHandler is None:
            raise RuntimeError("litellm custom handler support unavailable")
        super().__init__()
        self.slug = slug  # provider slug used to prefix model names
        self.config = config  # curated provider config blob

    def _resolve_api_base(self, api_base: Optional[str]) -> str:
        """Resolve the base URL: explicit arg, env override, then config default."""
        base = (
            api_base
            or _first_env_value(self.config.get("base_url_env"))
            or self.config.get("api_base")
        )
        if not base:
            raise CustomLLMError(500, f"{self.slug} missing base URL")
        return base.rstrip("/")

    def _resolve_api_key(self, api_key: Optional[str]) -> Optional[str]:
        """Resolve the API key: explicit arg first, then configured env vars."""
        if api_key:
            return api_key
        return _first_env_value(self.config.get("api_key_env"))

    def _apply_special_handling(self, messages):
        """Apply provider-specific message munging declared in the config."""
        special = self.config.get("special_handling") or {}
        if special.get("convert_content_list_to_string"):
            from litellm.litellm_core_utils.prompt_templates.common_utils import (
                handle_messages_with_content_list_to_str_conversion,
            )

            return handle_messages_with_content_list_to_str_conversion(messages)
        return messages

    def _inject_headers(self, headers):
        """Merge configured default headers under any caller-supplied headers."""
        combined = dict(self.config.get("default_headers") or {})
        combined.update(headers or {})
        return combined

    def _normalize_model_name(self, model: str) -> str:
        """Strip this provider's slug prefix; add the hf: namespace if configured."""
        if not isinstance(model, str):
            return model
        trimmed = model
        if trimmed.startswith(f"{self.slug}/"):
            trimmed = trimmed.split("/", 1)[1]
        hf_namespace = self.config.get("hf_namespace")
        if hf_namespace and not trimmed.startswith("hf:"):
            trimmed = f"hf:{trimmed}"
        return trimmed

    def _build_request_params(self, optional_params, stream: bool):
        """Copy optional params, folding in default headers and the stream flag."""
        params = dict(optional_params or {})
        default_headers = dict(self.config.get("default_headers") or {})
        headers = params.setdefault("extra_headers", default_headers)
        if headers is default_headers and default_headers:
            # Never hand our default-header dict to litellm directly; copy it.
            params["extra_headers"] = dict(default_headers)
        if stream:
            params["stream"] = True
        return params

    def _prepare_call_kwargs(self, kwargs, *, stream: bool, asynchronous: bool):
        """Normalize the kwargs shared by all four completion entry points.

        Mutates and returns *kwargs*: resolves base URL, key, headers and
        optional params, normalizes the model name, and forces litellm's
        "openai" provider routing.
        """
        kwargs["api_base"] = self._resolve_api_base(kwargs.get("api_base"))
        kwargs["api_key"] = self._resolve_api_key(kwargs.get("api_key"))
        kwargs["headers"] = self._inject_headers(kwargs.get("headers"))
        kwargs["optional_params"] = self._build_request_params(
            kwargs.get("optional_params"), stream
        )
        kwargs["messages"] = self._apply_special_handling(kwargs.get("messages", []))
        kwargs["model"] = self._normalize_model_name(kwargs.get("model"))
        kwargs["custom_llm_provider"] = "openai"
        if asynchronous:
            # litellm uses a flag on the sync entry point for async dispatch.
            kwargs["acompletion"] = True
        return kwargs

    def completion(self, *args, **kwargs):
        """Synchronous, non-streaming completion."""
        self._prepare_call_kwargs(kwargs, stream=False, asynchronous=False)
        return super().completion(*args, **kwargs)

    async def acompletion(self, *args, **kwargs):
        """Asynchronous, non-streaming completion."""
        self._prepare_call_kwargs(kwargs, stream=False, asynchronous=True)
        return await super().completion(*args, **kwargs)

    def streaming(self, *args, **kwargs):
        """Synchronous streaming: yields generic chunk dicts."""
        self._prepare_call_kwargs(kwargs, stream=True, asynchronous=False)
        response = super().completion(*args, **kwargs)
        for chunk in response:
            yield self.get_generic_chunk(chunk)

    async def astreaming(self, *args, **kwargs):
        """Asynchronous streaming: yields generic chunk dicts."""
        self._prepare_call_kwargs(kwargs, stream=True, asynchronous=True)
        response = await super().completion(*args, **kwargs)
        async for chunk in response:
            yield self.get_generic_chunk(chunk)

    def get_generic_chunk(self, chunk):
        """Convert a litellm streaming chunk into a generic chunk dict."""
        choice = chunk.choices[0] if chunk.choices else None
        delta = choice.delta if choice else None
        text_content = delta.content if delta and delta.content else ""
        tool_calls = delta.tool_calls if delta and delta.tool_calls else None
        if tool_calls and len(tool_calls):
            # Only the first tool call of each chunk is surfaced downstream.
            tool_calls = tool_calls[0]
        usage_data = getattr(chunk, "usage", None)
        if hasattr(usage_data, "model_dump"):
            usage_dict = usage_data.model_dump()
        elif isinstance(usage_data, dict):
            usage_dict = usage_data
        else:
            # No usage reported on this chunk: emit zeroed counters.
            usage_dict = {"completion_tokens": 0, "prompt_tokens": 0, "total_tokens": 0}
        return {
            "finish_reason": choice.finish_reason if choice else None,
            "index": choice.index if choice else 0,
            "is_finished": bool(choice.finish_reason) if choice else False,
            "text": text_content,
            "tool_use": tool_calls,
            "usage": usage_dict,
        }
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def _register_provider_with_litellm(slug: str, config: Dict) -> None:
    """Register provider metadata and a custom chat handler with LiteLLM.

    Best-effort: silently returns when litellm (or its JSON provider
    registry) is unavailable, when the config lacks the base URL or API-key
    env needed for registration, or when handler construction fails.

    Args:
        slug: Provider identifier used to prefix model names.
        config: Provider config blob (api_base, api_key_env, headers, ...).
    """
    try:
        from litellm.llms.openai_like.json_loader import JSONProviderRegistry
    except Exception:
        return
    JSONProviderRegistry.load()

    base_url = config.get("api_base")
    api_key_env = _coerce_str(config.get("api_key_env"))
    if not base_url or not api_key_env:
        # Not enough information to register this provider.
        return

    try:
        import litellm
    except Exception:
        return

    # Reuse a cached handler so repeated registration stays idempotent.
    handler = _CUSTOM_HANDLERS.get(slug)
    if handler is None:
        try:
            handler = _JSONOpenAIProvider(slug, config)
        except RuntimeError:
            # litellm handler support unavailable; nothing to register.
            return
        _CUSTOM_HANDLERS[slug] = handler

    already_present = any(item.get("provider") == slug for item in litellm.custom_provider_map)
    if not already_present:
        litellm.custom_provider_map.append({"provider": slug, "custom_handler": handler})
        try:
            litellm.custom_llm_setup()
        except Exception:
            # Setup failures are non-fatal; litellm may retry on demand.
            pass
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def _deep_merge(base: Dict, override: Dict) -> Dict:
|
|
237
|
+
"""Recursively merge override dict into base without mutating inputs."""
|
|
238
|
+
result = deepcopy(base)
|
|
239
|
+
for key, value in override.items():
|
|
240
|
+
if isinstance(value, dict) and isinstance(result.get(key), dict):
|
|
241
|
+
result[key] = _deep_merge(result[key], value)
|
|
242
|
+
else:
|
|
243
|
+
result[key] = deepcopy(value)
|
|
244
|
+
return result
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def _load_provider_configs() -> Dict[str, Dict]:
    """Load provider configuration overrides from the packaged JSON file.

    Returns an empty mapping when the resource is missing, unreadable, or
    contains invalid JSON — configuration is strictly best-effort.
    """
    configs: Dict[str, Dict] = {}
    try:
        resource = importlib_resources.files("cecli.resources").joinpath(RESOURCE_FILE)
        data = json.loads(resource.read_text())
    except (OSError, ModuleNotFoundError, json.JSONDecodeError):
        # OSError covers FileNotFoundError plus other read failures;
        # ModuleNotFoundError fires when cecli.resources is not importable
        # (the original only caught FileNotFoundError and would crash here).
        data = {}
    for provider, override in data.items():
        # Merge on top of any existing entry so repeated providers compose.
        base = configs.get(provider, {})
        configs[provider] = _deep_merge(base, override)
    return configs
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
PROVIDER_CONFIGS = _load_provider_configs()
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
class ModelProviderManager:
    """Fetch, cache, and normalize model metadata for configured providers.

    Model listings are fetched from each provider's ``models_url`` (or the
    OpenAI-style ``<api_base>/models`` endpoint), cached on disk under
    ``~/.cecli/caches`` for ``CACHE_TTL`` seconds, and translated into
    LiteLLM-style info dicts (token limits, per-token costs, mode).
    """

    # Lifetime of the on-disk model-list cache: 24 hours, in seconds.
    CACHE_TTL = 60 * 60 * 24
    # Providers typically quote prices per 1M tokens; used to scale to per-token.
    DEFAULT_TOKEN_PRICE_RATIO = 1000000

    def __init__(self, provider_configs: Optional[Dict[str, Dict]] = None) -> None:
        """Set up per-provider cache bookkeeping.

        :param provider_configs: slug -> config mapping; defaults to a deep
            copy of the module-level ``PROVIDER_CONFIGS``.
        """
        # NOTE(review): handle_core_files is defined elsewhere — presumably it
        # creates the directory and returns its Path; confirm at its definition.
        self.cache_dir = handle_core_files(Path.home() / ".cecli" / "caches")
        self.verify_ssl: bool = True
        self.provider_configs = provider_configs or deepcopy(PROVIDER_CONFIGS)
        # In-memory model payloads keyed by provider; None means "not fetched".
        self._provider_cache: Dict[str, Dict | None] = {}
        # Whether the on-disk cache has already been consulted per provider.
        self._cache_loaded: Dict[str, bool] = {}
        for name in self.provider_configs:
            self._provider_cache[name] = None
            self._cache_loaded[name] = False

    def set_verify_ssl(self, verify_ssl: bool) -> None:
        """Toggle TLS certificate verification for model-list HTTP requests."""
        self.verify_ssl = verify_ssl

    def supports_provider(self, provider: Optional[str]) -> bool:
        """Return True when *provider* is one of the configured slugs."""
        return bool(provider and provider in self.provider_configs)

    def get_provider_config(self, provider: Optional[str]) -> Optional[Dict]:
        """Return a copy of the provider's config, or None when unknown.

        The copy gets ``litellm_provider`` defaulted to the slug so callers
        can pass it straight to LiteLLM.
        """
        if not provider:
            return None
        config = self.provider_configs.get(provider)
        if not config:
            return None
        # Shallow copy so the setdefault below never mutates the stored config.
        config = dict(config)
        config.setdefault("litellm_provider", provider)
        return config

    def get_provider_base_url(self, provider: Optional[str]) -> Optional[str]:
        """Resolve the provider's API base URL.

        Environment variables listed under ``base_url_env`` take precedence
        over the static ``api_base`` entry; trailing slashes are stripped.
        """
        config = self.get_provider_config(provider)
        if not config:
            return None
        base_envs = config.get("base_url_env") or []
        for env_var in base_envs:
            val = os.environ.get(env_var)
            if val:
                return val.rstrip("/")
        return config.get("api_base")

    def get_required_api_keys(self, provider: Optional[str]) -> list[str]:
        """Return the env-var names that may hold this provider's API key."""
        config = self.get_provider_config(provider)
        if not config:
            return []
        return list(config.get("api_key_env", []))

    def get_model_info(self, model: str) -> Dict:
        """Look up normalized metadata for a ``provider/route`` model name.

        Falls back to a forced cache refresh when the route is unknown;
        returns ``{}`` when the model cannot be resolved at all.
        """
        provider, route = self._split_model(model)
        if not provider or not self._ensure_provider_state(provider):
            return {}
        content = self._ensure_content(provider)
        record = self._find_record(content, route)
        if not record and self.refresh_provider_cache(provider):
            # Cached listing may be stale; retry once against fresh data.
            content = self._provider_cache.get(provider)
            record = self._find_record(content, route)
        if not record:
            return {}
        return self._record_to_info(record, provider)

    def get_models_for_listing(self) -> Dict[str, Dict]:
        """Return ``{model_id: info}`` across every configured provider."""
        listings: Dict[str, Dict] = {}
        for provider in list(self.provider_configs.keys()):
            content = self._ensure_content(provider)
            if not content or "data" not in content:
                continue
            for record in content["data"]:
                model_id = record.get("id")
                if not model_id:
                    continue
                info = self._record_to_info(record, provider)
                if info:
                    listings[model_id] = info
        return listings

    def refresh_provider_cache(self, provider: str) -> bool:
        """Drop any cached listing and re-fetch; True when new data arrived."""
        if not self._ensure_provider_state(provider):
            return False
        config = self.provider_configs[provider]
        if not config.get("models_url") and not config.get("api_base"):
            # No endpoint to fetch from, so a refresh can never succeed.
            return False
        self._provider_cache[provider] = None
        # Mark the disk cache as consulted so _load_cache will not resurrect it.
        self._cache_loaded[provider] = True
        self._update_cache(provider)
        return bool(self._provider_cache.get(provider))

    def _ensure_provider_state(self, provider: str) -> bool:
        """Ensure bookkeeping slots exist for *provider*; False if unknown."""
        if provider not in self.provider_configs:
            return False
        self._provider_cache.setdefault(provider, None)
        self._cache_loaded.setdefault(provider, False)
        return True

    def _split_model(self, model: str) -> tuple[Optional[str], str]:
        """Split ``provider/route`` at the first slash; provider None if absent."""
        if "/" not in model:
            return None, model
        provider, route = model.split("/", 1)
        return provider, route

    def _ensure_content(self, provider: str) -> Optional[Dict]:
        """Return the provider's model payload, loading disk cache or fetching."""
        self._load_cache(provider)
        if not self._provider_cache.get(provider):
            self._update_cache(provider)
        return self._provider_cache.get(provider)

    def _find_record(self, content: Optional[Dict], route: str) -> Optional[Dict]:
        """Find the record whose ``id`` matches *route* in a ``{"data": [...]}`` payload.

        Also matches the route with any ``:suffix`` stripped (variant tags).
        """
        if not content or "data" not in content:
            return None
        candidates = {route}
        if ":" in route:
            candidates.add(route.split(":", 1)[0])
        return next((item for item in content["data"] if item.get("id") in candidates), None)

    def _record_to_info(self, record: Dict, provider: str) -> Dict:
        """Normalize a raw provider model record into a LiteLLM-style info dict.

        Providers disagree on field names, so each value is pulled from the
        first present key; costs are normalized to per-token floats.
        """
        context_len = _first_value(
            record,
            "max_input_tokens",
            "max_tokens",
            "max_output_tokens",
            "context_length",
            "context_window",
            "top_provider_context_length",
            "top_provider",
        )
        # Some records nest limits under a "top_provider" dict (OpenRouter-style).
        if isinstance(context_len, dict):
            context_len = context_len.get("context_length") or context_len.get("max_tokens")
        pricing = record.get("pricing", {}) if isinstance(record.get("pricing"), dict) else {}
        input_cost = _cost_per_token(
            _first_value(pricing, "prompt", "input", "prompt_tokens")
            or _first_value(record, "input_cost_per_token", "prompt_cost_per_token")
        )
        output_cost = _cost_per_token(
            _first_value(pricing, "completion", "output", "completion_tokens")
            or _first_value(record, "output_cost_per_token", "completion_cost_per_token")
        )
        max_tokens = _first_value(
            record,
            "max_tokens",
            "max_input_tokens",
            "context_length",
            "context_window",
            "top_provider_context_length",
        )
        max_output_tokens = _first_value(
            record,
            "max_output_tokens",
            "max_tokens",
            "context_length",
            "context_window",
            "top_provider_context_length",
        )
        # Fall back to the context window when no explicit limits were found.
        if max_tokens is None:
            max_tokens = context_len
        if max_output_tokens is None:
            max_output_tokens = context_len

        def _normalize_cost(cost: Optional[float]) -> float:
            # Heuristic: values >= 0.001 are assumed to be quoted per 1M tokens
            # and are scaled down; already-tiny values are taken as per-token.
            if cost is None or cost == 0:
                return 0.0
            if cost >= 0.001:
                return cost / self.DEFAULT_TOKEN_PRICE_RATIO
            return cost

        info = {
            "max_input_tokens": context_len,
            "max_tokens": max_tokens,
            "max_output_tokens": max_output_tokens,
            "input_cost_per_token": _normalize_cost(input_cost),
            "output_cost_per_token": _normalize_cost(output_cost),
            "litellm_provider": provider,
            "mode": record.get("mode", "chat"),
        }
        # Drop unset entries so callers can distinguish "unknown" from a value.
        return {k: v for k, v in info.items() if v is not None}

    def _get_cache_file(self, provider: str) -> Path:
        """Return the on-disk JSON cache path for *provider*'s model list."""
        fname = f"{provider}_models.json"
        return self.cache_dir / fname

    def _load_cache(self, provider: str) -> None:
        """Load the provider's disk cache into memory once, if fresh enough."""
        if self._cache_loaded.get(provider):
            return
        cache_file = self._get_cache_file(provider)
        try:
            self.cache_dir.mkdir(parents=True, exist_ok=True)
            if cache_file.exists():
                cache_age = time.time() - cache_file.stat().st_mtime
                if cache_age < self.CACHE_TTL:
                    try:
                        self._provider_cache[provider] = json.loads(cache_file.read_text())
                    except json.JSONDecodeError:
                        # Corrupt cache file: treat as absent, re-fetch later.
                        self._provider_cache[provider] = None
        except OSError:
            # Disk problems are non-fatal; we just proceed without a cache.
            pass
        self._cache_loaded[provider] = True

    def _update_cache(self, provider: str) -> None:
        """Fetch fresh models, persisting to disk; fall back to static models."""
        payload = self._fetch_provider_models(provider)
        cache_file = self._get_cache_file(provider)
        if payload:
            self._provider_cache[provider] = payload
            try:
                cache_file.write_text(json.dumps(payload, indent=2))
            except OSError:
                # Failing to persist is fine; memory cache is still updated.
                pass
            return
        # Fetch failed: fall back to any static model list from the config.
        static_models = self.provider_configs[provider].get("static_models")
        if static_models and not self._provider_cache.get(provider):
            self._provider_cache[provider] = {"data": static_models}

    def _fetch_provider_models(self, provider: str) -> Optional[Dict]:
        """Fetch the provider's model listing over HTTP; None on any failure."""
        config = self.provider_configs[provider]
        models_url = config.get("models_url")
        if not models_url:
            # Fall back to the OpenAI-compatible convention: <api_base>/models.
            api_base = config.get("api_base")
            if api_base:
                models_url = api_base.rstrip("/") + "/models"
        if not models_url:
            return None
        headers = {}
        default_headers = config.get("default_headers") or {}
        headers.update(default_headers)
        api_key = self._get_api_key(provider)
        requires_api_key = config.get("requires_api_key", True)
        if api_key:
            headers["Authorization"] = f"Bearer {api_key}"
        elif requires_api_key:
            # No key available and the provider demands one: skip the request.
            return None
        try:
            response = requests.get(
                models_url,
                headers=headers or None,
                timeout=config.get("timeout", 10),
                verify=self.verify_ssl,
            )
            response.raise_for_status()
            return response.json()
        except Exception as ex:
            # Best-effort fetch: report and let callers use cached/static data.
            print(f"Failed to fetch {provider} model list: {ex}")
            return None

    def _get_api_key(self, provider: str) -> Optional[str]:
        """Return the first non-empty API key found in the configured env vars."""
        config = self.provider_configs[provider]
        for env_var in config.get("api_key_env", []):
            value = os.environ.get(env_var)
            if value:
                return value
        return None
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def ensure_litellm_providers_registered() -> None:
    """One-time registration guard for LiteLLM provider metadata."""
    global _PROVIDERS_REGISTERED
    if not _PROVIDERS_REGISTERED:
        # First call wins: register every configured provider, then latch.
        for slug, cfg in PROVIDER_CONFIGS.items():
            _register_provider_with_litellm(slug, cfg)
        _PROVIDERS_REGISTERED = True
|
|
521
|
+
|
|
522
|
+
|
|
523
|
+
_NUMBER_RE = re.compile("-?(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][+-]?\\d+)?")
|
|
524
|
+
|
|
525
|
+
|
|
526
|
+
def _cost_per_token(val: Optional[str | float | int]) -> Optional[float]:
|
|
527
|
+
"""Parse token pricing strings into floats, tolerating currency prefixes."""
|
|
528
|
+
if val in (None, "", "-", "N/A"):
|
|
529
|
+
return None
|
|
530
|
+
if val == "0":
|
|
531
|
+
return 0.0
|
|
532
|
+
if isinstance(val, str):
|
|
533
|
+
cleaned = val.strip().replace(",", "")
|
|
534
|
+
if cleaned.startswith("$"):
|
|
535
|
+
cleaned = cleaned[1:]
|
|
536
|
+
match = _NUMBER_RE.search(cleaned)
|
|
537
|
+
if not match:
|
|
538
|
+
return None
|
|
539
|
+
val = match.group(0)
|
|
540
|
+
try:
|
|
541
|
+
return float(val)
|
|
542
|
+
except (TypeError, ValueError):
|
|
543
|
+
return None
|
|
544
|
+
|
|
545
|
+
|
|
546
|
+
def _first_value(record: Dict, *keys: str):
|
|
547
|
+
"""Return the first non-empty value for the provided keys."""
|
|
548
|
+
for key in keys:
|
|
549
|
+
value = record.get(key)
|
|
550
|
+
if value not in (None, ""):
|
|
551
|
+
return value
|
|
552
|
+
return None
|