cecli-dev 0.95.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cecli/__init__.py +20 -0
- cecli/__main__.py +4 -0
- cecli/_version.py +34 -0
- cecli/args.py +1092 -0
- cecli/args_formatter.py +228 -0
- cecli/change_tracker.py +133 -0
- cecli/coders/__init__.py +38 -0
- cecli/coders/agent_coder.py +1872 -0
- cecli/coders/architect_coder.py +63 -0
- cecli/coders/ask_coder.py +8 -0
- cecli/coders/base_coder.py +3993 -0
- cecli/coders/chat_chunks.py +116 -0
- cecli/coders/context_coder.py +52 -0
- cecli/coders/copypaste_coder.py +269 -0
- cecli/coders/editblock_coder.py +656 -0
- cecli/coders/editblock_fenced_coder.py +9 -0
- cecli/coders/editblock_func_coder.py +140 -0
- cecli/coders/editor_diff_fenced_coder.py +8 -0
- cecli/coders/editor_editblock_coder.py +8 -0
- cecli/coders/editor_whole_coder.py +8 -0
- cecli/coders/help_coder.py +15 -0
- cecli/coders/patch_coder.py +705 -0
- cecli/coders/search_replace.py +757 -0
- cecli/coders/shell.py +37 -0
- cecli/coders/single_wholefile_func_coder.py +101 -0
- cecli/coders/udiff_coder.py +428 -0
- cecli/coders/udiff_simple.py +12 -0
- cecli/coders/wholefile_coder.py +143 -0
- cecli/coders/wholefile_func_coder.py +133 -0
- cecli/commands/__init__.py +192 -0
- cecli/commands/add.py +226 -0
- cecli/commands/agent.py +51 -0
- cecli/commands/architect.py +46 -0
- cecli/commands/ask.py +44 -0
- cecli/commands/chat_mode.py +0 -0
- cecli/commands/clear.py +37 -0
- cecli/commands/code.py +46 -0
- cecli/commands/command_prefix.py +44 -0
- cecli/commands/commit.py +52 -0
- cecli/commands/context.py +47 -0
- cecli/commands/context_blocks.py +124 -0
- cecli/commands/context_management.py +51 -0
- cecli/commands/copy.py +62 -0
- cecli/commands/copy_context.py +81 -0
- cecli/commands/core.py +287 -0
- cecli/commands/diff.py +68 -0
- cecli/commands/drop.py +217 -0
- cecli/commands/editor.py +78 -0
- cecli/commands/exit.py +55 -0
- cecli/commands/git.py +57 -0
- cecli/commands/help.py +140 -0
- cecli/commands/history_search.py +40 -0
- cecli/commands/lint.py +109 -0
- cecli/commands/list_sessions.py +56 -0
- cecli/commands/load.py +85 -0
- cecli/commands/load_session.py +48 -0
- cecli/commands/load_skill.py +68 -0
- cecli/commands/ls.py +75 -0
- cecli/commands/map.py +37 -0
- cecli/commands/map_refresh.py +35 -0
- cecli/commands/model.py +118 -0
- cecli/commands/models.py +41 -0
- cecli/commands/multiline_mode.py +38 -0
- cecli/commands/paste.py +91 -0
- cecli/commands/quit.py +32 -0
- cecli/commands/read_only.py +267 -0
- cecli/commands/read_only_stub.py +270 -0
- cecli/commands/reasoning_effort.py +70 -0
- cecli/commands/remove_skill.py +68 -0
- cecli/commands/report.py +40 -0
- cecli/commands/reset.py +88 -0
- cecli/commands/run.py +99 -0
- cecli/commands/save.py +49 -0
- cecli/commands/save_session.py +43 -0
- cecli/commands/settings.py +69 -0
- cecli/commands/test.py +58 -0
- cecli/commands/think_tokens.py +74 -0
- cecli/commands/tokens.py +207 -0
- cecli/commands/undo.py +145 -0
- cecli/commands/utils/__init__.py +0 -0
- cecli/commands/utils/base_command.py +131 -0
- cecli/commands/utils/helpers.py +142 -0
- cecli/commands/utils/registry.py +53 -0
- cecli/commands/utils/save_load_manager.py +98 -0
- cecli/commands/voice.py +78 -0
- cecli/commands/weak_model.py +123 -0
- cecli/commands/web.py +87 -0
- cecli/deprecated_args.py +185 -0
- cecli/diffs.py +129 -0
- cecli/dump.py +29 -0
- cecli/editor.py +147 -0
- cecli/exceptions.py +115 -0
- cecli/format_settings.py +26 -0
- cecli/help.py +119 -0
- cecli/help_pats.py +19 -0
- cecli/helpers/__init__.py +9 -0
- cecli/helpers/copypaste.py +123 -0
- cecli/helpers/coroutines.py +8 -0
- cecli/helpers/file_searcher.py +142 -0
- cecli/helpers/model_providers.py +552 -0
- cecli/helpers/plugin_manager.py +81 -0
- cecli/helpers/profiler.py +162 -0
- cecli/helpers/requests.py +77 -0
- cecli/helpers/similarity.py +98 -0
- cecli/helpers/skills.py +577 -0
- cecli/history.py +186 -0
- cecli/io.py +1782 -0
- cecli/linter.py +304 -0
- cecli/llm.py +101 -0
- cecli/main.py +1280 -0
- cecli/mcp/__init__.py +154 -0
- cecli/mcp/oauth.py +250 -0
- cecli/mcp/server.py +278 -0
- cecli/mdstream.py +243 -0
- cecli/models.py +1255 -0
- cecli/onboarding.py +301 -0
- cecli/prompts/__init__.py +0 -0
- cecli/prompts/agent.yml +71 -0
- cecli/prompts/architect.yml +35 -0
- cecli/prompts/ask.yml +31 -0
- cecli/prompts/base.yml +99 -0
- cecli/prompts/context.yml +60 -0
- cecli/prompts/copypaste.yml +5 -0
- cecli/prompts/editblock.yml +143 -0
- cecli/prompts/editblock_fenced.yml +106 -0
- cecli/prompts/editblock_func.yml +25 -0
- cecli/prompts/editor_diff_fenced.yml +115 -0
- cecli/prompts/editor_editblock.yml +121 -0
- cecli/prompts/editor_whole.yml +46 -0
- cecli/prompts/help.yml +37 -0
- cecli/prompts/patch.yml +110 -0
- cecli/prompts/single_wholefile_func.yml +24 -0
- cecli/prompts/udiff.yml +106 -0
- cecli/prompts/udiff_simple.yml +13 -0
- cecli/prompts/utils/__init__.py +0 -0
- cecli/prompts/utils/prompt_registry.py +167 -0
- cecli/prompts/utils/system.py +56 -0
- cecli/prompts/wholefile.yml +50 -0
- cecli/prompts/wholefile_func.yml +24 -0
- cecli/queries/tree-sitter-language-pack/README.md +7 -0
- cecli/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/c-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
- cecli/queries/tree-sitter-language-pack/clojure-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/commonlisp-tags.scm +127 -0
- cecli/queries/tree-sitter-language-pack/cpp-tags.scm +18 -0
- cecli/queries/tree-sitter-language-pack/csharp-tags.scm +32 -0
- cecli/queries/tree-sitter-language-pack/d-tags.scm +26 -0
- cecli/queries/tree-sitter-language-pack/dart-tags.scm +97 -0
- cecli/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/elixir-tags.scm +59 -0
- cecli/queries/tree-sitter-language-pack/elm-tags.scm +22 -0
- cecli/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
- cecli/queries/tree-sitter-language-pack/go-tags.scm +49 -0
- cecli/queries/tree-sitter-language-pack/java-tags.scm +26 -0
- cecli/queries/tree-sitter-language-pack/javascript-tags.scm +96 -0
- cecli/queries/tree-sitter-language-pack/lua-tags.scm +39 -0
- cecli/queries/tree-sitter-language-pack/matlab-tags.scm +10 -0
- cecli/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
- cecli/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +101 -0
- cecli/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
- cecli/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
- cecli/queries/tree-sitter-language-pack/python-tags.scm +24 -0
- cecli/queries/tree-sitter-language-pack/r-tags.scm +27 -0
- cecli/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
- cecli/queries/tree-sitter-language-pack/ruby-tags.scm +69 -0
- cecli/queries/tree-sitter-language-pack/rust-tags.scm +63 -0
- cecli/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
- cecli/queries/tree-sitter-language-pack/swift-tags.scm +54 -0
- cecli/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
- cecli/queries/tree-sitter-languages/README.md +24 -0
- cecli/queries/tree-sitter-languages/c-tags.scm +12 -0
- cecli/queries/tree-sitter-languages/c_sharp-tags.scm +52 -0
- cecli/queries/tree-sitter-languages/cpp-tags.scm +18 -0
- cecli/queries/tree-sitter-languages/dart-tags.scm +92 -0
- cecli/queries/tree-sitter-languages/elisp-tags.scm +8 -0
- cecli/queries/tree-sitter-languages/elixir-tags.scm +59 -0
- cecli/queries/tree-sitter-languages/elm-tags.scm +22 -0
- cecli/queries/tree-sitter-languages/fortran-tags.scm +18 -0
- cecli/queries/tree-sitter-languages/go-tags.scm +36 -0
- cecli/queries/tree-sitter-languages/haskell-tags.scm +5 -0
- cecli/queries/tree-sitter-languages/hcl-tags.scm +77 -0
- cecli/queries/tree-sitter-languages/java-tags.scm +26 -0
- cecli/queries/tree-sitter-languages/javascript-tags.scm +96 -0
- cecli/queries/tree-sitter-languages/julia-tags.scm +60 -0
- cecli/queries/tree-sitter-languages/kotlin-tags.scm +30 -0
- cecli/queries/tree-sitter-languages/matlab-tags.scm +10 -0
- cecli/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
- cecli/queries/tree-sitter-languages/ocaml_interface-tags.scm +104 -0
- cecli/queries/tree-sitter-languages/php-tags.scm +32 -0
- cecli/queries/tree-sitter-languages/python-tags.scm +22 -0
- cecli/queries/tree-sitter-languages/ql-tags.scm +26 -0
- cecli/queries/tree-sitter-languages/ruby-tags.scm +69 -0
- cecli/queries/tree-sitter-languages/rust-tags.scm +63 -0
- cecli/queries/tree-sitter-languages/scala-tags.scm +64 -0
- cecli/queries/tree-sitter-languages/typescript-tags.scm +44 -0
- cecli/queries/tree-sitter-languages/zig-tags.scm +20 -0
- cecli/reasoning_tags.py +82 -0
- cecli/repo.py +626 -0
- cecli/repomap.py +1368 -0
- cecli/report.py +260 -0
- cecli/resources/__init__.py +3 -0
- cecli/resources/model-metadata.json +25751 -0
- cecli/resources/model-settings.yml +2394 -0
- cecli/resources/providers.json +67 -0
- cecli/run_cmd.py +143 -0
- cecli/scrape.py +295 -0
- cecli/sendchat.py +250 -0
- cecli/sessions.py +281 -0
- cecli/special.py +203 -0
- cecli/tools/__init__.py +72 -0
- cecli/tools/command.py +103 -0
- cecli/tools/command_interactive.py +113 -0
- cecli/tools/context_manager.py +175 -0
- cecli/tools/delete_block.py +154 -0
- cecli/tools/delete_line.py +120 -0
- cecli/tools/delete_lines.py +144 -0
- cecli/tools/extract_lines.py +281 -0
- cecli/tools/finished.py +35 -0
- cecli/tools/git_branch.py +132 -0
- cecli/tools/git_diff.py +49 -0
- cecli/tools/git_log.py +43 -0
- cecli/tools/git_remote.py +39 -0
- cecli/tools/git_show.py +37 -0
- cecli/tools/git_status.py +32 -0
- cecli/tools/grep.py +242 -0
- cecli/tools/indent_lines.py +195 -0
- cecli/tools/insert_block.py +263 -0
- cecli/tools/list_changes.py +71 -0
- cecli/tools/load_skill.py +51 -0
- cecli/tools/ls.py +77 -0
- cecli/tools/remove_skill.py +51 -0
- cecli/tools/replace_all.py +113 -0
- cecli/tools/replace_line.py +135 -0
- cecli/tools/replace_lines.py +180 -0
- cecli/tools/replace_text.py +186 -0
- cecli/tools/show_numbered_context.py +137 -0
- cecli/tools/thinking.py +52 -0
- cecli/tools/undo_change.py +82 -0
- cecli/tools/update_todo_list.py +148 -0
- cecli/tools/utils/base_tool.py +64 -0
- cecli/tools/utils/helpers.py +359 -0
- cecli/tools/utils/output.py +119 -0
- cecli/tools/utils/registry.py +145 -0
- cecli/tools/view_files_matching.py +138 -0
- cecli/tools/view_files_with_symbol.py +117 -0
- cecli/tui/__init__.py +83 -0
- cecli/tui/app.py +971 -0
- cecli/tui/io.py +566 -0
- cecli/tui/styles.tcss +117 -0
- cecli/tui/widgets/__init__.py +19 -0
- cecli/tui/widgets/completion_bar.py +331 -0
- cecli/tui/widgets/file_list.py +76 -0
- cecli/tui/widgets/footer.py +165 -0
- cecli/tui/widgets/input_area.py +320 -0
- cecli/tui/widgets/key_hints.py +16 -0
- cecli/tui/widgets/output.py +354 -0
- cecli/tui/widgets/status_bar.py +279 -0
- cecli/tui/worker.py +160 -0
- cecli/urls.py +16 -0
- cecli/utils.py +499 -0
- cecli/versioncheck.py +90 -0
- cecli/voice.py +90 -0
- cecli/waiting.py +38 -0
- cecli/watch.py +316 -0
- cecli/watch_prompts.py +12 -0
- cecli/website/Gemfile +8 -0
- cecli/website/_includes/blame.md +162 -0
- cecli/website/_includes/get-started.md +22 -0
- cecli/website/_includes/help-tip.md +5 -0
- cecli/website/_includes/help.md +24 -0
- cecli/website/_includes/install.md +5 -0
- cecli/website/_includes/keys.md +4 -0
- cecli/website/_includes/model-warnings.md +67 -0
- cecli/website/_includes/multi-line.md +22 -0
- cecli/website/_includes/python-m-aider.md +5 -0
- cecli/website/_includes/recording.css +228 -0
- cecli/website/_includes/recording.md +34 -0
- cecli/website/_includes/replit-pipx.md +9 -0
- cecli/website/_includes/works-best.md +1 -0
- cecli/website/_sass/custom/custom.scss +103 -0
- cecli/website/docs/config/adv-model-settings.md +2498 -0
- cecli/website/docs/config/agent-mode.md +320 -0
- cecli/website/docs/config/aider_conf.md +548 -0
- cecli/website/docs/config/api-keys.md +90 -0
- cecli/website/docs/config/custom-commands.md +187 -0
- cecli/website/docs/config/dotenv.md +493 -0
- cecli/website/docs/config/editor.md +127 -0
- cecli/website/docs/config/mcp.md +210 -0
- cecli/website/docs/config/model-aliases.md +173 -0
- cecli/website/docs/config/options.md +890 -0
- cecli/website/docs/config/reasoning.md +210 -0
- cecli/website/docs/config/skills.md +172 -0
- cecli/website/docs/config/tui.md +126 -0
- cecli/website/docs/config.md +44 -0
- cecli/website/docs/faq.md +379 -0
- cecli/website/docs/git.md +76 -0
- cecli/website/docs/index.md +47 -0
- cecli/website/docs/install/codespaces.md +39 -0
- cecli/website/docs/install/docker.md +48 -0
- cecli/website/docs/install/optional.md +100 -0
- cecli/website/docs/install/replit.md +8 -0
- cecli/website/docs/install.md +115 -0
- cecli/website/docs/languages.md +264 -0
- cecli/website/docs/legal/contributor-agreement.md +111 -0
- cecli/website/docs/legal/privacy.md +104 -0
- cecli/website/docs/llms/anthropic.md +77 -0
- cecli/website/docs/llms/azure.md +48 -0
- cecli/website/docs/llms/bedrock.md +132 -0
- cecli/website/docs/llms/cohere.md +34 -0
- cecli/website/docs/llms/deepseek.md +32 -0
- cecli/website/docs/llms/gemini.md +49 -0
- cecli/website/docs/llms/github.md +111 -0
- cecli/website/docs/llms/groq.md +36 -0
- cecli/website/docs/llms/lm-studio.md +39 -0
- cecli/website/docs/llms/ollama.md +75 -0
- cecli/website/docs/llms/openai-compat.md +39 -0
- cecli/website/docs/llms/openai.md +58 -0
- cecli/website/docs/llms/openrouter.md +78 -0
- cecli/website/docs/llms/other.md +117 -0
- cecli/website/docs/llms/vertex.md +50 -0
- cecli/website/docs/llms/warnings.md +10 -0
- cecli/website/docs/llms/xai.md +53 -0
- cecli/website/docs/llms.md +54 -0
- cecli/website/docs/more/analytics.md +127 -0
- cecli/website/docs/more/edit-formats.md +116 -0
- cecli/website/docs/more/infinite-output.md +192 -0
- cecli/website/docs/more-info.md +8 -0
- cecli/website/docs/recordings/auto-accept-architect.md +31 -0
- cecli/website/docs/recordings/dont-drop-original-read-files.md +35 -0
- cecli/website/docs/recordings/index.md +21 -0
- cecli/website/docs/recordings/model-accepts-settings.md +69 -0
- cecli/website/docs/recordings/tree-sitter-language-pack.md +80 -0
- cecli/website/docs/repomap.md +112 -0
- cecli/website/docs/scripting.md +100 -0
- cecli/website/docs/sessions.md +213 -0
- cecli/website/docs/troubleshooting/aider-not-found.md +24 -0
- cecli/website/docs/troubleshooting/edit-errors.md +76 -0
- cecli/website/docs/troubleshooting/imports.md +62 -0
- cecli/website/docs/troubleshooting/models-and-keys.md +54 -0
- cecli/website/docs/troubleshooting/support.md +79 -0
- cecli/website/docs/troubleshooting/token-limits.md +96 -0
- cecli/website/docs/troubleshooting/warnings.md +12 -0
- cecli/website/docs/troubleshooting.md +11 -0
- cecli/website/docs/usage/browser.md +57 -0
- cecli/website/docs/usage/caching.md +49 -0
- cecli/website/docs/usage/commands.md +133 -0
- cecli/website/docs/usage/conventions.md +119 -0
- cecli/website/docs/usage/copypaste.md +136 -0
- cecli/website/docs/usage/images-urls.md +48 -0
- cecli/website/docs/usage/lint-test.md +118 -0
- cecli/website/docs/usage/modes.md +211 -0
- cecli/website/docs/usage/not-code.md +179 -0
- cecli/website/docs/usage/notifications.md +87 -0
- cecli/website/docs/usage/tips.md +79 -0
- cecli/website/docs/usage/tutorials.md +30 -0
- cecli/website/docs/usage/voice.md +121 -0
- cecli/website/docs/usage/watch.md +294 -0
- cecli/website/docs/usage.md +102 -0
- cecli/website/share/index.md +101 -0
- cecli_dev-0.95.5.dist-info/METADATA +549 -0
- cecli_dev-0.95.5.dist-info/RECORD +366 -0
- cecli_dev-0.95.5.dist-info/WHEEL +5 -0
- cecli_dev-0.95.5.dist-info/entry_points.txt +4 -0
- cecli_dev-0.95.5.dist-info/licenses/LICENSE.txt +202 -0
- cecli_dev-0.95.5.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from typing import List
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
@dataclass
class ChatChunks:
    """Ordered collections of chat messages that together form one LLM prompt.

    Each field holds a list of message dicts (``{"role": ..., "content": ...}``).
    ``all_messages()`` flattens the chunks into their final prompt order, and the
    ``add_cache_control*`` helpers mark chunk boundaries as cacheable for
    providers (eg Anthropic) that support prompt caching.
    """

    system: List = field(default_factory=list)
    static: List = field(default_factory=list)
    examples: List = field(default_factory=list)
    pre_message: List = field(default_factory=list)
    done: List = field(default_factory=list)
    repo: List = field(default_factory=list)
    readonly_files: List = field(default_factory=list)
    chat_files: List = field(default_factory=list)
    edit_files: List = field(default_factory=list)
    cur: List = field(default_factory=list)
    post_message: List = field(default_factory=list)
    reminder: List = field(default_factory=list)
    chunk_ordering: List = field(default_factory=list)

    def __init__(self, chunk_ordering=None):
        # An explicit __init__ suppresses the dataclass-generated one, so the
        # default_factory defaults declared above were never applied and any
        # direct attribute read (eg ``chunks.cur``) could raise AttributeError.
        # Initialize every chunk explicitly so instances are always populated.
        self.system = []
        self.static = []
        self.examples = []
        self.pre_message = []
        self.done = []
        self.repo = []
        self.readonly_files = []
        self.chat_files = []
        self.edit_files = []
        self.cur = []
        self.post_message = []
        self.reminder = []
        # None (the default) means "use the hard-coded ordering" in all_messages().
        self.chunk_ordering = chunk_ordering

    def all_messages(self):
        """Return all messages flattened into one list, in prompt order."""
        if self.chunk_ordering:
            messages = []
            for chunk_name in self.chunk_ordering:
                chunk = getattr(self, chunk_name, [])
                if chunk:
                    messages.extend(chunk)
            return messages
        else:
            return (
                self.format_list("system")
                + self.format_list("static")
                + self.format_list("examples")
                + self.format_list("readonly_files")
                + self.format_list("chat_files")
                + self.format_list("repo")
                + self.format_list("pre_message")
                + self.format_list("done")
                + self.format_list("edit_files")
                + self.format_list("cur")
                + self.format_list("post_message")
                + self.format_list("reminder")
            )

    def add_cache_control_headers(self):
        """Mark the cacheable chunk boundaries with cache_control headers."""
        # Limit to 4 cacheable blocks to appease Anthropic's limits on chunk caching
        if self.format_list("readonly_files"):
            self.add_cache_control(self.format_list("readonly_files"))
        elif self.format_list("static"):
            self.add_cache_control(self.format_list("static"))
        elif self.format_list("examples"):
            self.add_cache_control(self.format_list("examples"))
        else:
            self.add_cache_control(self.format_list("system"))

        # The files form a cacheable block.
        # The block starts with readonly_files and ends with chat_files.
        # So we mark the end of chat_files.

        # The repo map is its own cacheable block.
        if self.format_list("repo"):
            self.add_cache_control(self.format_list("repo"))
        elif self.format_list("chat_files"):
            self.add_cache_control(self.format_list("chat_files"))

        # The history is ephemeral on its own.
        # Fix: this previously nested add_cache_control() calls, passing the
        # inner call's None return value as `messages`, so the penultimate
        # marking silently never happened.
        self.add_cache_control(self.format_list("cur"), penultimate=True)

    # Per this: https://github.com/BerriAI/litellm/issues/10226
    # The first and second to last messages are cache optimal
    # Since caches are also written to incrementally and you need
    # the past and current states to properly append and gain
    # efficiencies/savings in cache writing
    def add_cache_control(self, messages, penultimate=False):
        """Attach an ephemeral cache_control marker to the last message in place.

        When ``penultimate`` is true and there are at least two messages, the
        second-to-last message is marked as well (see the litellm issue above).
        String content is wrapped into the list-of-parts form on the way.
        """
        if not messages:
            return

        # Fix: this previously read `len(messages) < 2`, which raised
        # IndexError on single-message lists and skipped the penultimate
        # marking exactly when it was possible.
        if penultimate and len(messages) >= 2:
            content = messages[-2]["content"]
            if type(content) is str:
                content = dict(
                    type="text",
                    text=content,
                )
            content["cache_control"] = {"type": "ephemeral"}

            messages[-2]["content"] = [content]

        content = messages[-1]["content"]
        if type(content) is str:
            content = dict(
                type="text",
                text=content,
            )
        content["cache_control"] = {"type": "ephemeral"}

        messages[-1]["content"] = [content]

    def cacheable_messages(self):
        """Return the prefix of all_messages() ending at the last cached block.

        Scans backwards for the most recent message carrying a cache_control
        marker; if none is found, all messages are returned.
        """
        messages = self.all_messages()
        for i, message in enumerate(reversed(messages)):
            if isinstance(message.get("content"), list) and message["content"][0].get(
                "cache_control"
            ):
                return messages[: len(messages) - i]
        return messages

    def format_list(self, chunk):
        """Return the named chunk's message list, or [] if absent or not a list."""
        if type(getattr(self, chunk, [])) is not list:
            return []

        return getattr(self, chunk, [])
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
from .base_coder import Coder
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class ContextCoder(Coder):
    """Identify which files need to be edited for a given request."""

    edit_format = "context"
    prompt_format = "context"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        repo_map = self.repo_map
        if not repo_map:
            return

        # Always rebuild the map, and fold the no-files multiplier into the
        # base token budget up front so it is not applied a second time later.
        repo_map.refresh = "always"
        repo_map.max_map_tokens *= repo_map.map_mul_no_files
        repo_map.map_mul_no_files = 1.0

    async def reply_completed(self):
        reply = self.partial_response_content
        if not (reply and reply.strip()):
            return True

        in_chat = set(self.get_inchat_relative_files())
        mentioned = set(self.get_file_mentions(reply, ignore_current=True))

        # Nothing to do when the LLM named exactly the files already in chat.
        if mentioned == in_chat:
            return True

        # Stop reflecting once the budget is nearly exhausted.
        if self.num_reflections >= self.max_reflections - 1:
            return True

        # Replace the chat file set with exactly the files the LLM identified,
        # then ask it to try again with that context.
        self.abs_fnames = set()
        for rel_fname in mentioned:
            self.add_rel_fname(rel_fname)

        self.reflected_message = self.gpt_prompts.try_again

        return True

    def check_for_file_mentions(self, content):
        # ContextCoder handles file mentions itself in reply_completed(),
        # so the base class's automatic mention handling is disabled.
        pass
|
|
@@ -0,0 +1,269 @@
|
|
|
1
|
+
import hashlib
|
|
2
|
+
import json
|
|
3
|
+
import math
|
|
4
|
+
import time
|
|
5
|
+
import uuid
|
|
6
|
+
|
|
7
|
+
from cecli.exceptions import LiteLLMExceptions
|
|
8
|
+
from cecli.llm import litellm
|
|
9
|
+
|
|
10
|
+
from .base_coder import Coder
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class CopyPasteCoder(Coder):
    """Coder implementation that performs clipboard-driven interactions.

    This coder swaps the transport mechanism (clipboard vs API) but must remain compatible with the
    base ``Coder`` interface. In particular, many base methods assume ``self.gpt_prompts`` exists.

    We therefore mirror the prompt pack from the coder that matches the currently selected
    ``edit_format``.
    """

    # CopyPasteCoder doesn't have its own prompt format - it dynamically determines
    # prompts based on the selected edit_format
    prompt_format = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Ensure CopyPasteCoder always has a prompt pack.
        # We mirror prompts from the coder that matches the active edit format.
        self._init_prompts_from_selected_edit_format()

    @property
    def gpt_prompts(self):
        """Override gpt_prompts property for CopyPasteCoder.

        CopyPasteCoder dynamically determines prompts based on the selected edit format.
        This property returns the prompts that were set by _init_prompts_from_selected_edit_format().
        """
        if not hasattr(self, "_gpt_prompts"):
            raise AttributeError(
                "CopyPasteCoder must call _init_prompts_from_selected_edit_format() "
                "before accessing gpt_prompts"
            )
        return self._gpt_prompts

    @gpt_prompts.setter
    def gpt_prompts(self, value):
        """Setter for gpt_prompts property."""
        self._gpt_prompts = value

    def _init_prompts_from_selected_edit_format(self):
        """Initialize ``self.gpt_prompts`` based on the currently selected edit format.

        This prevents AttributeError crashes when base ``Coder`` code assumes ``self.gpt_prompts``
        exists (eg during message formatting, announcements, cancellation/cleanup paths, etc).
        """
        # Determine the selected edit_format the same way Coder.create() does.
        selected_edit_format = None
        if getattr(self, "args", None) is not None and getattr(self.args, "edit_format", None):
            selected_edit_format = self.args.edit_format
        else:
            selected_edit_format = getattr(self.main_model, "edit_format", None)

        # "code" is treated like None in Coder.create()
        if selected_edit_format == "code":
            selected_edit_format = None

        # If no edit format is selected, fall back to model default.
        if selected_edit_format is None:
            selected_edit_format = getattr(self.main_model, "edit_format", None)

        # Find the coder class that would have been selected for this edit_format.
        # Imported lazily (and defensively) to avoid a circular import with the
        # coders package at module load time.
        try:
            import cecli.coders as coders
        except Exception:
            coders = None

        target_coder_class = None
        if coders is not None:
            # NOTE(review): this assumes coders.__all__ holds the coder *classes*
            # themselves (not name strings) — matches the hasattr checks below.
            for coder_cls in getattr(coders, "__all__", []):
                if (
                    hasattr(coder_cls, "edit_format")
                    and coder_cls.edit_format == selected_edit_format
                ):
                    target_coder_class = coder_cls
                    break

        # Mirror prompt pack + edit_format where available.
        if target_coder_class is not None:
            # All coder classes must have prompt_format attribute
            if (
                not hasattr(target_coder_class, "prompt_format")
                or target_coder_class.prompt_format is None
            ):
                raise AttributeError(
                    f"Target coder class {target_coder_class.__name__} must have a 'prompt_format'"
                    " attribute."
                )

            prompt_name = target_coder_class.prompt_format

            # Get prompts from cache or load them
            if prompt_name in Coder._prompt_cache:
                self.gpt_prompts = Coder._prompt_cache[prompt_name]
            else:
                # Create a dummy instance to trigger prompt loading.
                # __new__ bypasses the target coder's __init__ (and any I/O it
                # would do); accessing .gpt_prompts on it is presumably enough to
                # load the prompt pack — TODO confirm against base_coder.
                dummy_instance = target_coder_class.__new__(target_coder_class)
                dummy_instance.__class__ = target_coder_class
                self.gpt_prompts = dummy_instance.gpt_prompts

            # Keep announcements/formatting consistent with the selected coder.
            self.edit_format = getattr(target_coder_class, "edit_format", self.edit_format)
            return

        # Last-resort fallback: avoid crashing if we can't determine the prompts.
        # Prefer keeping any existing gpt_prompts (if one was set elsewhere).
        if not hasattr(self, "_gpt_prompts"):
            self.gpt_prompts = None

    async def send(self, messages, model=None, functions=None, tools=None):
        """Send ``messages`` either via the normal API path or via the clipboard.

        This is an async generator. When the model's transport is "api" it
        delegates to (and re-yields from) the base implementation. In clipboard
        mode it yields nothing: the whole exchange happens synchronously inside
        copy_paste_completion() and the result is surfaced through the usual
        partial_response_* attributes and output helpers.
        """
        model = model or self.main_model

        if getattr(model, "copy_paste_transport", "api") == "api":
            async for chunk in super().send(
                messages, model=model, functions=functions, tools=tools
            ):
                yield chunk
            return

        # Clipboard transport cannot express structured function/tool calls.
        if functions:
            self.io.tool_warning("copy/paste mode ignores function call requests.")
        if tools:
            self.io.tool_warning("copy/paste mode ignores tool call requests.")

        self.io.reset_streaming_response()

        # Base Coder methods (eg show_send_output/preprocess_response) expect these streaming
        # attributes to always exist, even when we bypass the normal API streaming path.
        self.partial_response_content = ""
        self.partial_response_function_call = None
        # preprocess_response() does len(self.partial_response_tool_calls), so it must not be None.
        self.partial_response_tool_calls = []

        try:
            hash_object, completion = self.copy_paste_completion(messages, model)
            self.chat_completion_call_hashes.append(hash_object.hexdigest())
            self.show_send_output(completion)
            self.calculate_and_show_tokens_and_cost(messages, completion)
        finally:
            # Always run the base class's post-processing, even if the
            # clipboard exchange raised (eg Ctrl+C or a clipboard error).
            self.preprocess_response()

        if self.partial_response_content:
            self.io.ai_output(self.partial_response_content)

    def copy_paste_completion(self, messages, model):
        """Run one prompt/response round trip through the system clipboard.

        Copies the flattened prompt text to the clipboard, waits for the user to
        paste back the LLM's reply, and returns ``(hash_object, completion)``
        where ``completion`` is a synthetic litellm.ModelResponse with locally
        estimated token usage. Raises on clipboard/import failures after
        reporting them to the user.
        """
        try:
            from cecli.helpers import copypaste
        except ImportError:  # pragma: no cover - import error path
            self.io.tool_error("copy/paste mode requires the pyperclip package.")
            self.io.tool_output("Install it with: pip install pyperclip")
            raise

        def content_to_text(content):
            """Extract text from the various content formats cecli/LLMs can produce."""
            if not content:
                return ""
            if isinstance(content, str):
                return content
            if isinstance(content, list):
                # List-of-parts form: keep only the textual parts, in order.
                parts = []
                for part in content:
                    if isinstance(part, dict):
                        text = part.get("text")
                        if isinstance(text, str):
                            parts.append(text)
                    elif isinstance(part, str):
                        parts.append(part)
                return "".join(parts)
            if isinstance(content, dict):
                text = content.get("text")
                if isinstance(text, str):
                    return text
                return ""
            return str(content)

        # Flatten the chat transcript into a single labeled text blob.
        lines = []
        for message in messages:
            text_content = content_to_text(message.get("content"))
            if not text_content:
                continue
            role = message.get("role")
            if role:
                lines.append(f"{role.upper()}:\n{text_content}")
            else:
                lines.append(text_content)

        prompt_text = "\n\n".join(lines).strip()

        try:
            copypaste.copy_to_clipboard(prompt_text)
        except copypaste.ClipboardError as err:  # pragma: no cover - clipboard error path
            self.io.tool_error(f"Unable to copy prompt to clipboard: {err}")
            raise

        self.io.tool_output("Request copied to clipboard.")
        self.io.tool_output("Paste it into your LLM interface, then copy the reply back.")
        self.io.tool_output("Waiting for clipboard updates (Ctrl+C to cancel)...")

        # Snapshot the current clipboard so the wait below only triggers on a
        # genuinely new value (ie the user's pasted reply).
        try:
            last_value = copypaste.read_clipboard()
        except copypaste.ClipboardError as err:  # pragma: no cover - clipboard error path
            self.io.tool_error(f"Unable to read clipboard: {err}")
            raise

        try:
            response_text = copypaste.wait_for_clipboard_change(initial=last_value)
        except copypaste.ClipboardError as err:  # pragma: no cover - clipboard error path
            self.io.tool_error(f"Unable to read clipboard: {err}")
            raise

        # Estimate tokens locally using the model's tokenizer; fallback to heuristic.
        def _safe_token_count(text):
            """Return token count via the model tokenizer, falling back to a heuristic."""
            if not text:
                return 0
            try:
                count = model.token_count(text)
                if isinstance(count, int) and count >= 0:
                    return count
            except Exception as ex:
                # Try to map known LiteLLM exceptions to user-friendly messages, then fall back.
                try:
                    ex_info = LiteLLMExceptions().get_ex_info(ex)
                    if ex_info and ex_info.description:
                        self.io.tool_warning(
                            f"Token count failed: {ex_info.description} Falling back to heuristic."
                        )
                except Exception:
                    # Avoid masking the original issue during error mapping.
                    pass
            # Rough heuristic: ~4 characters per token.
            return int(math.ceil(len(text) / 4))

        prompt_tokens = _safe_token_count(prompt_text)
        completion_tokens = _safe_token_count(response_text)
        total_tokens = prompt_tokens + completion_tokens

        # Synthesize a non-streaming completion object so downstream code
        # (show_send_output, cost accounting) can treat this like an API reply.
        completion = litellm.ModelResponse(
            id=f"chatcmpl-{uuid.uuid4()}",
            choices=[
                litellm.Choices(
                    index=0,
                    finish_reason="stop",
                    message=litellm.Message(role="assistant", content=response_text),
                )
            ],
            created=int(time.time()),
            model=model.name,
            usage={
                "prompt_tokens": prompt_tokens,
                "completion_tokens": completion_tokens,
                "total_tokens": total_tokens,
            },
        )

        # Hash the request the same way the API path does, for dedup/history.
        kwargs = dict(model=model.name, messages=messages, stream=False)
        hash_object = hashlib.sha1(json.dumps(kwargs, sort_keys=True).encode())  # nosec B324
        return hash_object, completion
|