llm-shell 0.9.2 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +61 -66
- data/lib/llm/shell/command.rb +40 -40
- data/lib/llm/shell/commands/clear_screen.rb +4 -18
- data/lib/llm/shell/commands/debug_mode.rb +12 -0
- data/lib/llm/shell/commands/dir_import.rb +4 -20
- data/lib/llm/shell/commands/disable_tool.rb +33 -0
- data/lib/llm/shell/commands/enable_tool.rb +33 -0
- data/lib/llm/shell/commands/file_import.rb +4 -20
- data/lib/llm/shell/commands/help.rb +23 -36
- data/lib/llm/shell/commands/show_chat.rb +4 -19
- data/lib/llm/shell/commands/show_version.rb +4 -20
- data/lib/llm/shell/commands/system_prompt.rb +4 -18
- data/lib/llm/shell/completion.rb +5 -5
- data/lib/llm/shell/config.rb +4 -5
- data/lib/llm/shell/formatter.rb +1 -2
- data/lib/llm/shell/internal/coderay/lib/coderay/duo.rb +81 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/_map.rb +17 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/comment_filter.rb +25 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/count.rb +39 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/debug.rb +49 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/debug_lint.rb +63 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/div.rb +23 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/encoder.rb +190 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/filter.rb +58 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/css.rb +65 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/numbering.rb +108 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/output.rb +164 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html.rb +333 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/json.rb +83 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/lines_of_code.rb +45 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/lint.rb +59 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/null.rb +18 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/page.rb +24 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/span.rb +23 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/statistic.rb +95 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/terminal.rb +195 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/text.rb +46 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/token_kind_filter.rb +111 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/xml.rb +72 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders/yaml.rb +50 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/encoders.rb +18 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/for_redcloth.rb +95 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/helpers/file_type.rb +151 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/helpers/plugin.rb +55 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/helpers/plugin_host.rb +221 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/helpers/word_list.rb +72 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/_map.rb +24 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/c.rb +189 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/clojure.rb +217 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/cpp.rb +217 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/css.rb +196 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/debug.rb +75 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/delphi.rb +144 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/diff.rb +221 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/erb.rb +81 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/go.rb +208 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/groovy.rb +268 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/haml.rb +168 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/html.rb +275 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java/builtin_types.rb +421 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java.rb +174 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java_script.rb +236 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/json.rb +98 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/lua.rb +280 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/php.rb +527 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/python.rb +287 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/raydebug.rb +75 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby/patterns.rb +178 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby/string_state.rb +79 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby.rb +477 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/sass.rb +232 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/scanner.rb +337 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/sql.rb +169 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/taskpaper.rb +36 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/text.rb +26 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/xml.rb +17 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners/yaml.rb +140 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/scanners.rb +27 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/styles/_map.rb +7 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/styles/alpha.rb +153 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/styles/style.rb +18 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/styles.rb +15 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/token_kinds.rb +85 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/tokens.rb +164 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/tokens_proxy.rb +55 -0
- data/lib/llm/shell/internal/coderay/lib/coderay/version.rb +3 -0
- data/lib/llm/shell/internal/coderay/lib/coderay.rb +284 -0
- data/lib/llm/shell/internal/io-line/lib/io/line/multiple.rb +19 -0
- data/lib/{io → llm/shell/internal/io-line/lib/io}/line.rb +2 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/bot/builder.rb +31 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/bot/conversable.rb +37 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/bot/prompt/completion.rb +49 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/bot/prompt/respond.rb +49 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/bot.rb +150 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/buffer.rb +162 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/client.rb +36 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/error.rb +49 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/eventhandler.rb +44 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream/event.rb +69 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream/parser.rb +88 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream.rb +8 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/file.rb +91 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/function.rb +177 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/message.rb +178 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/mime.rb +140 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/multipart.rb +101 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/object/builder.rb +38 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/object/kernel.rb +53 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/object.rb +89 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/provider.rb +352 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/error_handler.rb +36 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/files.rb +155 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format/completion_format.rb +88 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format.rb +29 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/models.rb +54 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/completion.rb +39 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/enumerable.rb +11 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/file.rb +23 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/web_search.rb +21 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/stream_parser.rb +66 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb +138 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format/completion_format.rb +68 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format.rb +27 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek.rb +75 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/audio.rb +73 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/error_handler.rb +47 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/files.rb +146 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/format/completion_format.rb +69 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/format.rb +39 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/images.rb +133 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/models.rb +60 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/completion.rb +35 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/embedding.rb +8 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/file.rb +11 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/files.rb +15 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/image.rb +31 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/models.rb +15 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/web_search.rb +22 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/stream_parser.rb +86 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini.rb +173 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/llamacpp.rb +74 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/error_handler.rb +36 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/format/completion_format.rb +77 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/format.rb +29 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/models.rb +56 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/response/completion.rb +28 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/response/embedding.rb +9 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/stream_parser.rb +44 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama.rb +116 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/audio.rb +91 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/error_handler.rb +46 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/files.rb +134 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/completion_format.rb +90 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/moderation_format.rb +35 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/respond_format.rb +72 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format.rb +54 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/images.rb +109 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/models.rb +55 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/moderations.rb +65 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/audio.rb +7 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/completion.rb +40 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/embedding.rb +9 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/enumerable.rb +23 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/file.rb +7 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/image.rb +16 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/moderations.rb +34 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/responds.rb +48 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/web_search.rb +21 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/responses/stream_parser.rb +76 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/responses.rb +99 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/stream_parser.rb +86 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/vector_stores.rb +228 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai.rb +206 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/xai/images.rb +58 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/xai.rb +72 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/providers/zai.rb +74 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/response.rb +67 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/array.rb +26 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/boolean.rb +13 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/integer.rb +43 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/leaf.rb +78 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/null.rb +13 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/number.rb +43 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/object.rb +41 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/string.rb +34 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema/version.rb +8 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/schema.rb +81 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/server_tool.rb +32 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/tool/param.rb +75 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/tool.rb +78 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/utils.rb +19 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm/version.rb +5 -0
- data/lib/llm/shell/internal/llm.rb/lib/llm.rb +121 -0
- data/lib/llm/shell/internal/optparse/lib/optionparser.rb +2 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/ac.rb +70 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/date.rb +18 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/kwargs.rb +27 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/shellwords.rb +7 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/time.rb +11 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/uri.rb +7 -0
- data/lib/llm/shell/internal/optparse/lib/optparse/version.rb +80 -0
- data/lib/llm/shell/internal/optparse/lib/optparse.rb +2469 -0
- data/lib/llm/shell/internal/paint/lib/paint/constants.rb +104 -0
- data/lib/llm/shell/internal/paint/lib/paint/pa.rb +13 -0
- data/lib/llm/shell/internal/paint/lib/paint/rgb_colors.rb +14 -0
- data/lib/llm/shell/internal/paint/lib/paint/shortcuts.rb +100 -0
- data/lib/llm/shell/internal/paint/lib/paint/shortcuts_version.rb +5 -0
- data/lib/llm/shell/internal/paint/lib/paint/util.rb +16 -0
- data/lib/llm/shell/internal/paint/lib/paint/version.rb +5 -0
- data/lib/llm/shell/internal/paint/lib/paint.rb +261 -0
- data/lib/llm/shell/internal/reline/lib/reline/config.rb +378 -0
- data/lib/llm/shell/internal/reline/lib/reline/face.rb +199 -0
- data/lib/llm/shell/internal/reline/lib/reline/history.rb +76 -0
- data/lib/llm/shell/internal/reline/lib/reline/io/ansi.rb +322 -0
- data/lib/llm/shell/internal/reline/lib/reline/io/dumb.rb +120 -0
- data/lib/llm/shell/internal/reline/lib/reline/io/windows.rb +530 -0
- data/lib/llm/shell/internal/reline/lib/reline/io.rb +55 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor/base.rb +37 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor/composite.rb +17 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor/emacs.rb +517 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor/vi_command.rb +518 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor/vi_insert.rb +517 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_actor.rb +8 -0
- data/lib/llm/shell/internal/reline/lib/reline/key_stroke.rb +119 -0
- data/lib/llm/shell/internal/reline/lib/reline/kill_ring.rb +125 -0
- data/lib/llm/shell/internal/reline/lib/reline/line_editor.rb +2356 -0
- data/lib/llm/shell/internal/reline/lib/reline/unicode/east_asian_width.rb +1292 -0
- data/lib/llm/shell/internal/reline/lib/reline/unicode.rb +421 -0
- data/lib/llm/shell/internal/reline/lib/reline/version.rb +3 -0
- data/lib/llm/shell/internal/reline/lib/reline.rb +527 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/generated_parser.rb +712 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/handler.rb +268 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_date.rb +35 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_date_time.rb +42 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_time.rb +40 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/parser.rb +21 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/scanner.rb +92 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/string_utils.rb +40 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb/version.rb +5 -0
- data/lib/llm/shell/internal/tomlrb/lib/tomlrb.rb +49 -0
- data/lib/llm/shell/options.rb +1 -1
- data/lib/llm/shell/renderer.rb +2 -3
- data/lib/llm/shell/repl.rb +21 -16
- data/lib/llm/shell/tool.rb +42 -0
- data/lib/llm/shell/tools/read_file.rb +15 -0
- data/lib/llm/shell/tools/system.rb +17 -0
- data/lib/llm/shell/tools/write_file.rb +16 -0
- data/lib/llm/shell/version.rb +1 -1
- data/lib/llm/shell.rb +83 -39
- data/libexec/llm-shell/shell +4 -6
- data/llm-shell.gemspec +0 -4
- metadata +233 -63
- data/lib/llm/function.rb +0 -17
- data/lib/llm/shell/command/extension.rb +0 -42
- data/lib/llm/shell/commands/utils.rb +0 -21
- data/lib/llm/shell/functions/read_file.rb +0 -22
- data/lib/llm/shell/functions/write_file.rb +0 -22
+++ data/lib/llm/shell/internal/llm.rb/lib/llm/object.rb
@@ -0,0 +1,89 @@
# frozen_string_literal: true

##
# The {LLM::Object LLM::Object} class encapsulates a Hash object. It is
# similar in spirit to OpenStruct, and it was introduced after OpenStruct
# became a bundled gem rather than a default gem in Ruby 3.5.
class LLM::Object < BasicObject
  require_relative "object/builder"
  require_relative "object/kernel"

  extend Builder
  include Kernel
  include ::Enumerable
  defined?(::PP) ? include(::PP::ObjectMixin) : nil

  ##
  # @param [Hash] h
  # @return [LLM::Object]
  def initialize(h = {})
    @h = h.transform_keys(&:to_sym) || h
  end

  ##
  # Yields a key|value pair to a block.
  # @yieldparam [Symbol] k
  # @yieldparam [Object] v
  # @return [void]
  def each(&)
    @h.each(&)
  end

  ##
  # @param [Symbol, #to_sym] k
  # @return [Object]
  def [](k)
    @h[k.to_sym]
  end

  ##
  # @param [Symbol, #to_sym] k
  # @param [Object] v
  # @return [void]
  def []=(k, v)
    @h[k.to_sym] = v
  end

  ##
  # @return [String]
  def to_json(...)
    to_h.to_json(...)
  end

  ##
  # @return [Boolean]
  def empty?
    @h.empty?
  end

  ##
  # @return [Hash]
  def to_h
    @h
  end
  alias_method :to_hash, :to_h

  ##
  # @return [Object, nil]
  def dig(...)
    to_h.dig(...)
  end

  ##
  # @return [Hash]
  def slice(...)
    to_h.slice(...)
  end

  private

  def method_missing(m, *args, &b)
    if m.to_s.end_with?("=")
      @h[m[0..-2].to_sym] = args.first
    elsif @h.key?(m)
      @h[m]
    else
      nil
    end
  end
end
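
The class above is essentially a symbol-keyed Hash wrapper: reads go through method_missing, writer-style calls ("foo=") assign into the underlying Hash, and unknown keys return nil. A minimal usage sketch follows; it is not part of the diff and the keys and values are made up:

    obj = LLM::Object.new("name" => "llm-shell", "version" => "0.10.0")
    obj.name             #=> "llm-shell"  (method_missing reads @h[:name])
    obj[:version]        #=> "0.10.0"     (keys are normalized to Symbols)
    obj.license = "0BSD" #   writer-style calls assign into the underlying Hash
    obj.to_h             #=> {name: "llm-shell", version: "0.10.0", license: "0BSD"}
    obj.missing_key      #=> nil          (unknown keys return nil rather than raising)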
+++ data/lib/llm/shell/internal/llm.rb/lib/llm/provider.rb
@@ -0,0 +1,352 @@
# frozen_string_literal: true

##
# The Provider class represents an abstract class for
# LLM (Language Model) providers.
#
# @abstract
class LLM::Provider
  require "net/http"
  require_relative "client"
  include LLM::Client

  @@clients = {}

  ##
  # @api private
  def self.clients = @@clients

  ##
  # @param [String, nil] key
  #  The secret key for authentication
  # @param [String] host
  #  The host address of the LLM provider
  # @param [Integer] port
  #  The port number
  # @param [Integer] timeout
  #  The number of seconds to wait for a response
  # @param [Boolean] ssl
  #  Whether to use SSL for the connection
  # @param [Boolean] persistent
  #  Whether to use a persistent connection.
  #  Requires the net-http-persistent gem.
  def initialize(key:, host:, port: 443, timeout: 60, ssl: true, persistent: false)
    @key = key
    @host = host
    @port = port
    @timeout = timeout
    @ssl = ssl
    @client = persistent ? persistent_client : transient_client
    @base_uri = URI("#{ssl ? "https" : "http"}://#{host}:#{port}/")
  end

  ##
  # Returns an inspection of the provider object
  # @return [String]
  # @note The secret key is redacted in inspect for security reasons
  def inspect
    "#<#{self.class.name}:0x#{object_id.to_s(16)} @key=[REDACTED] @http=#{@http.inspect}>"
  end

  ##
  # Provides an embedding
  # @param [String, Array<String>] input
  #  The input to embed
  # @param [String] model
  #  The embedding model to use
  # @param [Hash] params
  #  Other embedding parameters
  # @raise [NotImplementedError]
  #  When the method is not implemented by a subclass
  # @return [LLM::Response]
  def embed(input, model: nil, **params)
    raise NotImplementedError
  end

  ##
  # Provides an interface to the chat completions API
  # @example
  #  llm = LLM.openai(key: ENV["KEY"])
  #  messages = [{role: "system", content: "Your task is to answer all of my questions"}]
  #  res = llm.complete("5 + 2 ?", messages:)
  #  print "[#{res.choices[0].role}]", res.choices[0].content, "\n"
  # @param [String] prompt
  #  The input prompt to be completed
  # @param [Hash] params
  #  The parameters to maintain throughout the conversation.
  #  Any parameter the provider supports can be included and
  #  not only those listed here.
  # @option params [Symbol] :role Defaults to the provider's default role
  # @option params [String] :model Defaults to the provider's default model
  # @option params [#to_json, nil] :schema Defaults to nil
  # @option params [Array<LLM::Function>, nil] :tools Defaults to nil
  # @raise [NotImplementedError]
  #  When the method is not implemented by a subclass
  # @return [LLM::Response]
  def complete(prompt, params = {})
    raise NotImplementedError
  end

  ##
  # Starts a new chat powered by the chat completions API
  # @param prompt (see LLM::Provider#complete)
  # @param params (see LLM::Provider#complete)
  # @return [LLM::Bot]
  def chat(prompt, params = {})
    role = params.delete(:role)
    LLM::Bot.new(self, params).chat(prompt, role:)
  end

  ##
  # Starts a new chat powered by the responses API
  # @param prompt (see LLM::Provider#complete)
  # @param params (see LLM::Provider#complete)
  # @raise (see LLM::Provider#complete)
  # @return [LLM::Bot]
  def respond(prompt, params = {})
    role = params.delete(:role)
    LLM::Bot.new(self, params).respond(prompt, role:)
  end

  ##
  # @note
  #  Compared to the chat completions API, the responses API
  #  can require less bandwidth on each turn, maintain state
  #  server-side, and produce faster responses.
  # @return [LLM::OpenAI::Responses]
  #  Returns an interface to the responses API
  def responses
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::Images, LLM::Gemini::Images]
  #  Returns an interface to the images API
  def images
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::Audio]
  #  Returns an interface to the audio API
  def audio
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::Files]
  #  Returns an interface to the files API
  def files
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::Models]
  #  Returns an interface to the models API
  def models
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::Moderations]
  #  Returns an interface to the moderations API
  def moderations
    raise NotImplementedError
  end

  ##
  # @return [LLM::OpenAI::VectorStore]
  #  Returns an interface to the vector stores API
  def vector_stores
    raise NotImplementedError
  end

  ##
  # @return [String]
  #  Returns the role of the assistant in the conversation.
  #  Usually "assistant" or "model"
  def assistant_role
    raise NotImplementedError
  end

  ##
  # @return [String]
  #  Returns the default model for chat completions
  def default_model
    raise NotImplementedError
  end

  ##
  # Returns an object that can generate a JSON schema
  # @return [LLM::Schema]
  def schema
    @schema ||= LLM::Schema.new
  end

  ##
  # Add one or more headers to all requests
  # @example
  #  llm = LLM.openai(key: ENV["KEY"])
  #  llm.with(headers: {"OpenAI-Organization" => ENV["ORG"]})
  #  llm.with(headers: {"OpenAI-Project" => ENV["PROJECT"]})
  # @param [Hash<String,String>] headers
  #  One or more headers
  # @return [LLM::Provider]
  #  Returns self
  def with(headers:)
    tap { (@headers ||= {}).merge!(headers) }
  end

  ##
  # @note
  #  This method might be outdated, and the {LLM::Provider#server_tool LLM::Provider#server_tool}
  #  method can be used if a tool is not found here.
  # Returns all known tools provided by a provider.
  # @return [String => LLM::ServerTool]
  def server_tools
    {}
  end

  ##
  # @note
  #  OpenAI, Anthropic, and Gemini provide platform-tools for things
  #  like web search, and more.
  # Returns a tool provided by a provider.
  # @example
  #  llm = LLM.openai(key: ENV["KEY"])
  #  tools = [llm.server_tool(:web_search)]
  #  res = llm.responses.create("Summarize today's news", tools:)
  #  print res.output_text, "\n"
  # @param [String, Symbol] name The name of the tool
  # @param [Hash] options Configuration options for the tool
  # @return [LLM::ServerTool]
  def server_tool(name, options = {})
    LLM::ServerTool.new(name, options, self)
  end

  ##
  # Provides a web search capability
  # @param [String] query The search query
  # @raise [NotImplementedError]
  #  When the method is not implemented by a subclass
  # @return [LLM::Response]
  def web_search(query:)
    raise NotImplementedError
  end

  private

  attr_reader :client, :base_uri, :host, :port, :timeout, :ssl

  ##
  # The headers to include with a request
  # @raise [NotImplementedError]
  #  (see LLM::Provider#complete)
  def headers
    raise NotImplementedError
  end

  ##
  # @return [Class]
  #  Returns the class responsible for handling an unsuccessful LLM response
  # @raise [NotImplementedError]
  #  (see LLM::Provider#complete)
  def error_handler
    raise NotImplementedError
  end

  ##
  # @return [Class]
  def event_handler
    LLM::EventHandler
  end

  ##
  # @return [Class]
  #  Returns the provider-specific Server-Side Events (SSE) parser
  def stream_parser
    raise NotImplementedError
  end

  ##
  # Executes a HTTP request
  # @param [Net::HTTPRequest] request
  #  The request to send
  # @param [Proc] b
  #  A block to yield the response to (optional)
  # @return [Net::HTTPResponse]
  #  The response from the server
  # @raise [LLM::Error::Unauthorized]
  #  When authentication fails
  # @raise [LLM::Error::RateLimit]
  #  When the rate limit is exceeded
  # @raise [LLM::Error::ResponseError]
  #  When any other unsuccessful status code is returned
  # @raise [SystemCallError]
  #  When there is a network error at the operating system level
  # @return [Net::HTTPResponse]
  def execute(request:, stream: nil, stream_parser: self.stream_parser, &b)
    args = (Net::HTTP === client) ? [request] : [URI.join(base_uri, request.path), request]
    res = if stream
      client.request(*args) do |res|
        handler = event_handler.new stream_parser.new(stream)
        parser = LLM::EventStream::Parser.new
        parser.register(handler)
        res.read_body(parser)
        # If the handler body is empty, it means the
        # response was most likely not streamed or
        # parsing has failed. In that case, we fallback
        # on the original response body.
        res.body = handler.body.empty? ? parser.body.dup : handler.body
      ensure
        parser&.free
      end
    else
      b ? client.request(*args) { (Net::HTTPSuccess === _1) ? b.call(_1) : _1 } :
          client.request(*args)
    end
    handle_response(res)
  end

  ##
  # Handles the response from a request
  # @param [Net::HTTPResponse] res
  #  The response to handle
  # @return [Net::HTTPResponse]
  def handle_response(res)
    case res
    when Net::HTTPOK then res
    else error_handler.new(res).raise_error!
    end
  end

  ##
  # @param [Net::HTTPRequest] req
  #  The request to set the body stream for
  # @param [IO] io
  #  The IO object to set as the body stream
  # @return [void]
  def set_body_stream(req, io)
    req.body_stream = io
    req["transfer-encoding"] = "chunked" unless req["content-length"]
  end

  ##
  # Resolves tools to their function representations
  # @param [Array<LLM::Function, LLM::Tool>] tools
  #  The tools to map
  # @raise [TypeError]
  #  When a tool is not recognized
  # @return [Array<LLM::Function>]
  def resolve_tools(tools)
    (tools || []).map do |tool|
      if tool.respond_to?(:function)
        tool.function
      elsif [LLM::Function, LLM::ServerTool, Hash].any? { _1 === tool }
        tool
      else
        raise TypeError, "#{tool.class} given as a tool but it is not recognized"
      end
    end
  end
end
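
LLM::Provider is the abstract base class that the concrete providers vendored in this release (OpenAI, Anthropic, Gemini, Ollama, and others under internal/llm.rb/lib/llm/providers) subclass: the plumbing (execute, handle_response, with, server_tool) lives here, while complete, embed, headers, and the per-API accessors raise NotImplementedError until a subclass supplies them. A short usage sketch based on the doc examples above, assuming the bundled OpenAI provider and a hypothetical environment variable name:

    llm = LLM.openai(key: ENV["OPENAI_KEY"])                     # a concrete LLM::Provider subclass
    llm.with(headers: {"OpenAI-Organization" => ENV["ORG"]})     # merged into every request
    res = llm.complete("5 + 2 ?")                                # one-off chat completion
    bot = llm.chat("Answer all of my questions", role: :system)  # LLM::Bot over the completions API
    tools = [llm.server_tool(:web_search)]                       # provider-hosted ("server") tool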
+++ data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/error_handler.rb
@@ -0,0 +1,36 @@
# frozen_string_literal: true

class LLM::Anthropic
  ##
  # @private
  class ErrorHandler
    ##
    # @return [Net::HTTPResponse]
    #  Non-2XX response from the server
    attr_reader :res

    ##
    # @param [Net::HTTPResponse] res
    #  The response from the server
    # @return [LLM::Anthropic::ErrorHandler]
    def initialize(res)
      @res = res
    end

    ##
    # @raise [LLM::Error]
    #  Raises a subclass of {LLM::Error LLM::Error}
    def raise_error!
      case res
      when Net::HTTPServerError
        raise LLM::ServerError.new { _1.response = res }, "Server error"
      when Net::HTTPUnauthorized
        raise LLM::UnauthorizedError.new { _1.response = res }, "Authentication error"
      when Net::HTTPTooManyRequests
        raise LLM::RateLimitError.new { _1.response = res }, "Too many requests"
      else
        raise LLM::ResponseError.new { _1.response = res }, "Unexpected response"
      end
    end
  end
end
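
The handler maps HTTP status classes onto llm.rb's exception hierarchy, and LLM::Provider#handle_response invokes it for any response other than Net::HTTPOK. A rescue sketch, not part of the diff, showing how those errors surface to calling code (the environment variable name is hypothetical):

    llm = LLM.anthropic(key: ENV["ANTHROPIC_KEY"])
    begin
      llm.files.create(file: "/documents/haiku.txt")
    rescue LLM::RateLimitError => e     # Net::HTTPTooManyRequests
      warn "rate limited: #{e.message}"
    rescue LLM::UnauthorizedError => e  # Net::HTTPUnauthorized
      warn "check the API key: #{e.message}"
    rescue LLM::ResponseError => e      # any other unexpected status
      warn "request failed: #{e.message}"
    end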
+++ data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/files.rb
@@ -0,0 +1,155 @@
# frozen_string_literal: true

class LLM::Anthropic
  ##
  # The {LLM::Anthropic::Files LLM::Anthropic::Files} class provides a files
  # object for interacting with [Anthropic's Files API](https://docs.anthropic.com/en/docs/build-with-claude/files).
  #
  # @example
  #  #!/usr/bin/env ruby
  #  require "llm"
  #
  #  llm = LLM.anthropic(key: ENV["KEY"])
  #  bot = LLM::Bot.new(llm)
  #  file = llm.files.create file: "/books/goodread.pdf"
  #  bot.chat ["Tell me about this PDF", file]
  #  bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
  class Files
    require_relative "response/file"
    ##
    # Returns a new Files object
    # @param provider [LLM::Provider]
    # @return [LLM::Anthropic::Files]
    def initialize(provider)
      @provider = provider
    end

    ##
    # List all files
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.all
    #  res.each do |file|
    #    print "id: ", file.id, "\n"
    #  end
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
    # @param [Hash] params Other parameters (see Anthropic docs)
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def all(**params)
      query = URI.encode_www_form(params)
      req = Net::HTTP::Get.new("/v1/files?#{query}", headers)
      res = execute(request: req)
      LLM::Response.new(res).extend(LLM::Anthropic::Response::Enumerable)
    end

    ##
    # Create a file
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.create file: "/documents/haiku.txt"
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
    # @param [File, LLM::File, String] file The file
    # @param [Hash] params Other parameters (see Anthropic docs)
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def create(file:, **params)
      multi = LLM::Multipart.new(params.merge!(file: LLM.File(file)))
      req = Net::HTTP::Post.new("/v1/files", headers)
      req["content-type"] = multi.content_type
      set_body_stream(req, multi.body)
      res = execute(request: req)
      LLM::Response.new(res).extend(LLM::Anthropic::Response::File)
    end

    ##
    # Get a file
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.get(file: "file-1234567890")
    #  print "id: ", res.id, "\n"
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
    # @param [#id, #to_s] file The file ID
    # @param [Hash] params Other parameters - if any (see Anthropic docs)
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def get(file:, **params)
      file_id = file.respond_to?(:id) ? file.id : file
      query = URI.encode_www_form(params)
      req = Net::HTTP::Get.new("/v1/files/#{file_id}?#{query}", headers)
      res = execute(request: req)
      LLM::Response.new(res).extend(LLM::Anthropic::Response::File)
    end

    ##
    # Retrieve file metadata
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.get_metadata(file: "file-1234567890")
    #  print "id: ", res.id, "\n"
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files
    # @param [#id, #to_s] file The file ID
    # @param [Hash] params Other parameters - if any (see Anthropic docs)
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def get_metadata(file:, **params)
      query = URI.encode_www_form(params)
      file_id = file.respond_to?(:id) ? file.id : file
      req = Net::HTTP::Get.new("/v1/files/#{file_id}?#{query}", headers)
      res = execute(request: req)
      LLM::Response.new(res).extend(LLM::Anthropic::Response::File)
    end
    alias_method :retrieve_metadata, :get_metadata

    ##
    # Delete a file
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.delete(file: "file-1234567890")
    #  print res.deleted, "\n"
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
    # @param [#id, #to_s] file The file ID
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def delete(file:)
      file_id = file.respond_to?(:id) ? file.id : file
      req = Net::HTTP::Delete.new("/v1/files/#{file_id}", headers)
      res = execute(request: req)
      LLM::Response.new(res)
    end

    ##
    # Download the contents of a file
    # @note
    #  You can only download files that were created by the code
    #  execution tool. Files that you uploaded cannot be downloaded.
    # @example
    #  llm = LLM.anthropic(key: ENV["KEY"])
    #  res = llm.files.download(file: "file-1234567890")
    #  File.binwrite "program.c", res.file.read
    #  print res.file.read, "\n"
    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
    # @param [#id, #to_s] file The file ID
    # @param [Hash] params Other parameters (see Anthropic docs)
    # @raise (see LLM::Provider#request)
    # @return [LLM::Response]
    def download(file:, **params)
      query = URI.encode_www_form(params)
      file_id = file.respond_to?(:id) ? file.id : file
      req = Net::HTTP::Get.new("/v1/files/#{file_id}/content?#{query}", headers)
      io = StringIO.new("".b)
      res = execute(request: req) { |res| res.read_body { |chunk| io << chunk } }
      LLM::Response.new(res).tap { _1.define_singleton_method(:file) { io } }
    end

    private

    def key
      @provider.instance_variable_get(:@key)
    end

    [:headers, :execute, :set_body_stream].each do |m|
      define_method(m) { |*args, **kwargs, &b| @provider.send(m, *args, **kwargs, &b) }
    end
  end
end