llm-shell 0.9.2 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (258)
  1. checksums.yaml +4 -4
  2. data/README.md +61 -66
  3. data/lib/llm/shell/command.rb +40 -40
  4. data/lib/llm/shell/commands/clear_screen.rb +4 -18
  5. data/lib/llm/shell/commands/debug_mode.rb +12 -0
  6. data/lib/llm/shell/commands/dir_import.rb +4 -20
  7. data/lib/llm/shell/commands/disable_tool.rb +33 -0
  8. data/lib/llm/shell/commands/enable_tool.rb +33 -0
  9. data/lib/llm/shell/commands/file_import.rb +4 -20
  10. data/lib/llm/shell/commands/help.rb +23 -36
  11. data/lib/llm/shell/commands/show_chat.rb +4 -19
  12. data/lib/llm/shell/commands/show_version.rb +4 -20
  13. data/lib/llm/shell/commands/system_prompt.rb +4 -18
  14. data/lib/llm/shell/completion.rb +5 -5
  15. data/lib/llm/shell/config.rb +4 -5
  16. data/lib/llm/shell/formatter.rb +1 -2
  17. data/lib/llm/shell/internal/coderay/lib/coderay/duo.rb +81 -0
  18. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/_map.rb +17 -0
  19. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/comment_filter.rb +25 -0
  20. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/count.rb +39 -0
  21. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/debug.rb +49 -0
  22. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/debug_lint.rb +63 -0
  23. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/div.rb +23 -0
  24. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/encoder.rb +190 -0
  25. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/filter.rb +58 -0
  26. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/css.rb +65 -0
  27. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/numbering.rb +108 -0
  28. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html/output.rb +164 -0
  29. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/html.rb +333 -0
  30. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/json.rb +83 -0
  31. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/lines_of_code.rb +45 -0
  32. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/lint.rb +59 -0
  33. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/null.rb +18 -0
  34. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/page.rb +24 -0
  35. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/span.rb +23 -0
  36. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/statistic.rb +95 -0
  37. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/terminal.rb +195 -0
  38. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/text.rb +46 -0
  39. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/token_kind_filter.rb +111 -0
  40. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/xml.rb +72 -0
  41. data/lib/llm/shell/internal/coderay/lib/coderay/encoders/yaml.rb +50 -0
  42. data/lib/llm/shell/internal/coderay/lib/coderay/encoders.rb +18 -0
  43. data/lib/llm/shell/internal/coderay/lib/coderay/for_redcloth.rb +95 -0
  44. data/lib/llm/shell/internal/coderay/lib/coderay/helpers/file_type.rb +151 -0
  45. data/lib/llm/shell/internal/coderay/lib/coderay/helpers/plugin.rb +55 -0
  46. data/lib/llm/shell/internal/coderay/lib/coderay/helpers/plugin_host.rb +221 -0
  47. data/lib/llm/shell/internal/coderay/lib/coderay/helpers/word_list.rb +72 -0
  48. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/_map.rb +24 -0
  49. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/c.rb +189 -0
  50. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/clojure.rb +217 -0
  51. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/cpp.rb +217 -0
  52. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/css.rb +196 -0
  53. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/debug.rb +75 -0
  54. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/delphi.rb +144 -0
  55. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/diff.rb +221 -0
  56. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/erb.rb +81 -0
  57. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/go.rb +208 -0
  58. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/groovy.rb +268 -0
  59. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/haml.rb +168 -0
  60. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/html.rb +275 -0
  61. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java/builtin_types.rb +421 -0
  62. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java.rb +174 -0
  63. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/java_script.rb +236 -0
  64. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/json.rb +98 -0
  65. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/lua.rb +280 -0
  66. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/php.rb +527 -0
  67. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/python.rb +287 -0
  68. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/raydebug.rb +75 -0
  69. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby/patterns.rb +178 -0
  70. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby/string_state.rb +79 -0
  71. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/ruby.rb +477 -0
  72. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/sass.rb +232 -0
  73. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/scanner.rb +337 -0
  74. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/sql.rb +169 -0
  75. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/taskpaper.rb +36 -0
  76. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/text.rb +26 -0
  77. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/xml.rb +17 -0
  78. data/lib/llm/shell/internal/coderay/lib/coderay/scanners/yaml.rb +140 -0
  79. data/lib/llm/shell/internal/coderay/lib/coderay/scanners.rb +27 -0
  80. data/lib/llm/shell/internal/coderay/lib/coderay/styles/_map.rb +7 -0
  81. data/lib/llm/shell/internal/coderay/lib/coderay/styles/alpha.rb +153 -0
  82. data/lib/llm/shell/internal/coderay/lib/coderay/styles/style.rb +18 -0
  83. data/lib/llm/shell/internal/coderay/lib/coderay/styles.rb +15 -0
  84. data/lib/llm/shell/internal/coderay/lib/coderay/token_kinds.rb +85 -0
  85. data/lib/llm/shell/internal/coderay/lib/coderay/tokens.rb +164 -0
  86. data/lib/llm/shell/internal/coderay/lib/coderay/tokens_proxy.rb +55 -0
  87. data/lib/llm/shell/internal/coderay/lib/coderay/version.rb +3 -0
  88. data/lib/llm/shell/internal/coderay/lib/coderay.rb +284 -0
  89. data/lib/llm/shell/internal/io-line/lib/io/line/multiple.rb +19 -0
  90. data/lib/{io → llm/shell/internal/io-line/lib/io}/line.rb +2 -0
  91. data/lib/llm/shell/internal/llm.rb/lib/llm/bot/builder.rb +31 -0
  92. data/lib/llm/shell/internal/llm.rb/lib/llm/bot/conversable.rb +37 -0
  93. data/lib/llm/shell/internal/llm.rb/lib/llm/bot/prompt/completion.rb +49 -0
  94. data/lib/llm/shell/internal/llm.rb/lib/llm/bot/prompt/respond.rb +49 -0
  95. data/lib/llm/shell/internal/llm.rb/lib/llm/bot.rb +150 -0
  96. data/lib/llm/shell/internal/llm.rb/lib/llm/buffer.rb +162 -0
  97. data/lib/llm/shell/internal/llm.rb/lib/llm/client.rb +36 -0
  98. data/lib/llm/shell/internal/llm.rb/lib/llm/error.rb +49 -0
  99. data/lib/llm/shell/internal/llm.rb/lib/llm/eventhandler.rb +44 -0
  100. data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream/event.rb +69 -0
  101. data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream/parser.rb +88 -0
  102. data/lib/llm/shell/internal/llm.rb/lib/llm/eventstream.rb +8 -0
  103. data/lib/llm/shell/internal/llm.rb/lib/llm/file.rb +91 -0
  104. data/lib/llm/shell/internal/llm.rb/lib/llm/function.rb +177 -0
  105. data/lib/llm/shell/internal/llm.rb/lib/llm/message.rb +178 -0
  106. data/lib/llm/shell/internal/llm.rb/lib/llm/mime.rb +140 -0
  107. data/lib/llm/shell/internal/llm.rb/lib/llm/multipart.rb +101 -0
  108. data/lib/llm/shell/internal/llm.rb/lib/llm/object/builder.rb +38 -0
  109. data/lib/llm/shell/internal/llm.rb/lib/llm/object/kernel.rb +53 -0
  110. data/lib/llm/shell/internal/llm.rb/lib/llm/object.rb +89 -0
  111. data/lib/llm/shell/internal/llm.rb/lib/llm/provider.rb +352 -0
  112. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/error_handler.rb +36 -0
  113. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/files.rb +155 -0
  114. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format/completion_format.rb +88 -0
  115. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format.rb +29 -0
  116. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/models.rb +54 -0
  117. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/completion.rb +39 -0
  118. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/enumerable.rb +11 -0
  119. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/file.rb +23 -0
  120. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/web_search.rb +21 -0
  121. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/stream_parser.rb +66 -0
  122. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb +138 -0
  123. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format/completion_format.rb +68 -0
  124. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format.rb +27 -0
  125. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek.rb +75 -0
  126. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/audio.rb +73 -0
  127. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/error_handler.rb +47 -0
  128. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/files.rb +146 -0
  129. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/format/completion_format.rb +69 -0
  130. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/format.rb +39 -0
  131. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/images.rb +133 -0
  132. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/models.rb +60 -0
  133. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/completion.rb +35 -0
  134. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/embedding.rb +8 -0
  135. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/file.rb +11 -0
  136. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/files.rb +15 -0
  137. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/image.rb +31 -0
  138. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/models.rb +15 -0
  139. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/response/web_search.rb +22 -0
  140. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini/stream_parser.rb +86 -0
  141. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/gemini.rb +173 -0
  142. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/llamacpp.rb +74 -0
  143. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/error_handler.rb +36 -0
  144. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/format/completion_format.rb +77 -0
  145. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/format.rb +29 -0
  146. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/models.rb +56 -0
  147. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/response/completion.rb +28 -0
  148. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/response/embedding.rb +9 -0
  149. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama/stream_parser.rb +44 -0
  150. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/ollama.rb +116 -0
  151. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/audio.rb +91 -0
  152. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/error_handler.rb +46 -0
  153. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/files.rb +134 -0
  154. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/completion_format.rb +90 -0
  155. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/moderation_format.rb +35 -0
  156. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format/respond_format.rb +72 -0
  157. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/format.rb +54 -0
  158. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/images.rb +109 -0
  159. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/models.rb +55 -0
  160. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/moderations.rb +65 -0
  161. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/audio.rb +7 -0
  162. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/completion.rb +40 -0
  163. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/embedding.rb +9 -0
  164. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/enumerable.rb +23 -0
  165. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/file.rb +7 -0
  166. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/image.rb +16 -0
  167. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/moderations.rb +34 -0
  168. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/responds.rb +48 -0
  169. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/response/web_search.rb +21 -0
  170. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/responses/stream_parser.rb +76 -0
  171. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/responses.rb +99 -0
  172. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/stream_parser.rb +86 -0
  173. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai/vector_stores.rb +228 -0
  174. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/openai.rb +206 -0
  175. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/xai/images.rb +58 -0
  176. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/xai.rb +72 -0
  177. data/lib/llm/shell/internal/llm.rb/lib/llm/providers/zai.rb +74 -0
  178. data/lib/llm/shell/internal/llm.rb/lib/llm/response.rb +67 -0
  179. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/array.rb +26 -0
  180. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/boolean.rb +13 -0
  181. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/integer.rb +43 -0
  182. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/leaf.rb +78 -0
  183. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/null.rb +13 -0
  184. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/number.rb +43 -0
  185. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/object.rb +41 -0
  186. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/string.rb +34 -0
  187. data/lib/llm/shell/internal/llm.rb/lib/llm/schema/version.rb +8 -0
  188. data/lib/llm/shell/internal/llm.rb/lib/llm/schema.rb +81 -0
  189. data/lib/llm/shell/internal/llm.rb/lib/llm/server_tool.rb +32 -0
  190. data/lib/llm/shell/internal/llm.rb/lib/llm/tool/param.rb +75 -0
  191. data/lib/llm/shell/internal/llm.rb/lib/llm/tool.rb +78 -0
  192. data/lib/llm/shell/internal/llm.rb/lib/llm/utils.rb +19 -0
  193. data/lib/llm/shell/internal/llm.rb/lib/llm/version.rb +5 -0
  194. data/lib/llm/shell/internal/llm.rb/lib/llm.rb +121 -0
  195. data/lib/llm/shell/internal/optparse/lib/optionparser.rb +2 -0
  196. data/lib/llm/shell/internal/optparse/lib/optparse/ac.rb +70 -0
  197. data/lib/llm/shell/internal/optparse/lib/optparse/date.rb +18 -0
  198. data/lib/llm/shell/internal/optparse/lib/optparse/kwargs.rb +27 -0
  199. data/lib/llm/shell/internal/optparse/lib/optparse/shellwords.rb +7 -0
  200. data/lib/llm/shell/internal/optparse/lib/optparse/time.rb +11 -0
  201. data/lib/llm/shell/internal/optparse/lib/optparse/uri.rb +7 -0
  202. data/lib/llm/shell/internal/optparse/lib/optparse/version.rb +80 -0
  203. data/lib/llm/shell/internal/optparse/lib/optparse.rb +2469 -0
  204. data/lib/llm/shell/internal/paint/lib/paint/constants.rb +104 -0
  205. data/lib/llm/shell/internal/paint/lib/paint/pa.rb +13 -0
  206. data/lib/llm/shell/internal/paint/lib/paint/rgb_colors.rb +14 -0
  207. data/lib/llm/shell/internal/paint/lib/paint/shortcuts.rb +100 -0
  208. data/lib/llm/shell/internal/paint/lib/paint/shortcuts_version.rb +5 -0
  209. data/lib/llm/shell/internal/paint/lib/paint/util.rb +16 -0
  210. data/lib/llm/shell/internal/paint/lib/paint/version.rb +5 -0
  211. data/lib/llm/shell/internal/paint/lib/paint.rb +261 -0
  212. data/lib/llm/shell/internal/reline/lib/reline/config.rb +378 -0
  213. data/lib/llm/shell/internal/reline/lib/reline/face.rb +199 -0
  214. data/lib/llm/shell/internal/reline/lib/reline/history.rb +76 -0
  215. data/lib/llm/shell/internal/reline/lib/reline/io/ansi.rb +322 -0
  216. data/lib/llm/shell/internal/reline/lib/reline/io/dumb.rb +120 -0
  217. data/lib/llm/shell/internal/reline/lib/reline/io/windows.rb +530 -0
  218. data/lib/llm/shell/internal/reline/lib/reline/io.rb +55 -0
  219. data/lib/llm/shell/internal/reline/lib/reline/key_actor/base.rb +37 -0
  220. data/lib/llm/shell/internal/reline/lib/reline/key_actor/composite.rb +17 -0
  221. data/lib/llm/shell/internal/reline/lib/reline/key_actor/emacs.rb +517 -0
  222. data/lib/llm/shell/internal/reline/lib/reline/key_actor/vi_command.rb +518 -0
  223. data/lib/llm/shell/internal/reline/lib/reline/key_actor/vi_insert.rb +517 -0
  224. data/lib/llm/shell/internal/reline/lib/reline/key_actor.rb +8 -0
  225. data/lib/llm/shell/internal/reline/lib/reline/key_stroke.rb +119 -0
  226. data/lib/llm/shell/internal/reline/lib/reline/kill_ring.rb +125 -0
  227. data/lib/llm/shell/internal/reline/lib/reline/line_editor.rb +2356 -0
  228. data/lib/llm/shell/internal/reline/lib/reline/unicode/east_asian_width.rb +1292 -0
  229. data/lib/llm/shell/internal/reline/lib/reline/unicode.rb +421 -0
  230. data/lib/llm/shell/internal/reline/lib/reline/version.rb +3 -0
  231. data/lib/llm/shell/internal/reline/lib/reline.rb +527 -0
  232. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/generated_parser.rb +712 -0
  233. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/handler.rb +268 -0
  234. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_date.rb +35 -0
  235. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_date_time.rb +42 -0
  236. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/local_time.rb +40 -0
  237. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/parser.rb +21 -0
  238. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/scanner.rb +92 -0
  239. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/string_utils.rb +40 -0
  240. data/lib/llm/shell/internal/tomlrb/lib/tomlrb/version.rb +5 -0
  241. data/lib/llm/shell/internal/tomlrb/lib/tomlrb.rb +49 -0
  242. data/lib/llm/shell/options.rb +1 -1
  243. data/lib/llm/shell/renderer.rb +2 -3
  244. data/lib/llm/shell/repl.rb +21 -16
  245. data/lib/llm/shell/tool.rb +42 -0
  246. data/lib/llm/shell/tools/read_file.rb +15 -0
  247. data/lib/llm/shell/tools/system.rb +17 -0
  248. data/lib/llm/shell/tools/write_file.rb +16 -0
  249. data/lib/llm/shell/version.rb +1 -1
  250. data/lib/llm/shell.rb +83 -39
  251. data/libexec/llm-shell/shell +4 -6
  252. data/llm-shell.gemspec +0 -4
  253. metadata +233 -63
  254. data/lib/llm/function.rb +0 -17
  255. data/lib/llm/shell/command/extension.rb +0 -42
  256. data/lib/llm/shell/commands/utils.rb +0 -21
  257. data/lib/llm/shell/functions/read_file.rb +0 -22
  258. data/lib/llm/shell/functions/write_file.rb +0 -22
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format/completion_format.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module LLM::Anthropic::Format
+  ##
+  # @private
+  class CompletionFormat
+    ##
+    # @param [LLM::Message, Hash] message
+    #  The message to format
+    def initialize(message)
+      @message = message
+    end
+
+    ##
+    # Formats the message for the Anthropic chat completions API
+    # @return [Hash]
+    def format
+      catch(:abort) do
+        if Hash === message
+          {role: message[:role], content: format_content(message[:content])}
+        else
+          format_message
+        end
+      end
+    end
+
+    private
+
+    def format_message
+      if message.tool_call?
+        {role: message.role, content: message.extra[:original_tool_calls]}
+      else
+        {role: message.role, content: format_content(content)}
+      end
+    end
+
+    ##
+    # @param [String, URI] content
+    #  The content to format
+    # @return [String, Hash]
+    #  The formatted content
+    def format_content(content)
+      case content
+      when Hash
+        content.empty? ? throw(:abort, nil) : [content]
+      when Array
+        content.empty? ? throw(:abort, nil) : content.flat_map { format_content(_1) }
+      when URI
+        [{type: :image, source: {type: "url", url: content.to_s}}]
+      when File
+        content.close unless content.closed?
+        format_content(LLM.File(content.path))
+      when LLM::File
+        if content.image?
+          [{type: :image, source: {type: "base64", media_type: content.mime_type, data: content.to_b64}}]
+        elsif content.pdf?
+          [{type: :document, source: {type: "base64", media_type: content.mime_type, data: content.to_b64}}]
+        else
+          raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
+                                  "is not an image or PDF, and therefore not supported by the " \
+                                  "Anthropic API"
+        end
+      when LLM::Response
+        if content.file?
+          [{type: content.file_type, source: {type: :file, file_id: content.id}}]
+        else
+          prompt_error!(content)
+        end
+      when String
+        [{type: :text, text: content}]
+      when LLM::Message
+        format_content(content.content)
+      when LLM::Function::Return
+        [{type: "tool_result", tool_use_id: content.id, content: [{type: :text, text: JSON.dump(content.value)}]}]
+      else
+        prompt_error!(content)
+      end
+    end
+
+    def prompt_error!(content)
+      raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
+                              "is not supported by the Anthropic API"
+    end
+
+    def message = @message
+    def content = message.content
+  end
+end
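
As an aside (not part of the diff), a minimal sketch of how the formatter above maps a plain user message onto Anthropic's content-block format, assuming the vendored llm.rb namespace is loaded:

    # Hypothetical usage sketch, not part of the packaged code.
    # A plain string is wrapped in a single text content block.
    fmt = LLM::Anthropic::Format::CompletionFormat.new({role: :user, content: "Hello"})
    fmt.format
    #=> {role: :user, content: [{type: :text, text: "Hello"}]}
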
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/format.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class LLM::Anthropic
+  ##
+  # @private
+  module Format
+    require_relative "format/completion_format"
+
+    ##
+    # @param [Array<LLM::Message>] messages
+    #  The messages to format
+    # @return [Array<Hash>]
+    def format(messages)
+      messages.filter_map do
+        CompletionFormat.new(_1).format
+      end
+    end
+
+    private
+
+    ##
+    # @param [Hash] params
+    # @return [Hash]
+    def format_tools(tools)
+      return {} unless tools&.any?
+      {tools: tools.map { _1.respond_to?(:format) ? _1.format(self) : _1 }}
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/models.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+class LLM::Anthropic
+  require_relative "response/enumerable"
+  ##
+  # The {LLM::Anthropic::Models LLM::Anthropic::Models} class provides a model
+  # object for interacting with [Anthropic's models API](https://platform.anthropic.com/docs/api-reference/models/list).
+  # The models API allows a client to query Anthropic for a list of models
+  # that are available for use with the Anthropic API.
+  #
+  # @example
+  #   #!/usr/bin/env ruby
+  #   require "llm"
+  #
+  #   llm = LLM.anthropic(key: ENV["KEY"])
+  #   res = llm.models.all
+  #   res.each do |model|
+  #     print "id: ", model.id, "\n"
+  #   end
+  class Models
+    ##
+    # Returns a new Models object
+    # @param provider [LLM::Provider]
+    # @return [LLM::Anthropic::Files]
+    def initialize(provider)
+      @provider = provider
+    end
+
+    ##
+    # List all models
+    # @example
+    #   llm = LLM.anthropic(key: ENV["KEY"])
+    #   res = llm.models.all
+    #   res.each do |model|
+    #     print "id: ", model.id, "\n"
+    #   end
+    # @see https://docs.anthropic.com/en/api/models-list Anthropic docs
+    # @param [Hash] params Other parameters (see Anthropic docs)
+    # @raise (see LLM::Provider#request)
+    # @return [LLM::Response]
+    def all(**params)
+      query = URI.encode_www_form(params)
+      req = Net::HTTP::Get.new("/v1/models?#{query}", headers)
+      res = execute(request: req)
+      LLM::Response.new(res).extend(LLM::Anthropic::Response::Enumerable)
+    end
+
+    private
+
+    [:headers, :execute].each do |m|
+      define_method(m) { |*args, **kwargs, &b| @provider.send(m, *args, **kwargs, &b) }
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/completion.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module LLM::Anthropic::Response
+  module Completion
+    def choices = format_choices
+    def role = body.role
+    def model = body.model
+    def prompt_tokens = body.usage["input_tokens"] || 0
+    def completion_tokens = body.usage["output_tokens"] || 0
+    def total_tokens = prompt_tokens + completion_tokens
+
+    private
+
+    def format_choices
+      texts.map.with_index do |choice, index|
+        extra = {
+          index:, response: self,
+          tool_calls: format_tool_calls(tools), original_tool_calls: tools
+        }
+        LLM::Message.new(role, choice["text"], extra)
+      end
+    end
+
+    def format_tool_calls(tools)
+      (tools || []).filter_map do |tool|
+        tool = {
+          id: tool.id,
+          name: tool.name,
+          arguments: tool.input
+        }
+        LLM::Object.new(tool)
+      end
+    end
+
+    def parts = body.content
+    def texts = @texts ||= LLM::Object.from_hash(parts.select { _1["type"] == "text" })
+    def tools = @tools ||= LLM::Object.from_hash(parts.select { _1["type"] == "tool_use" })
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/enumerable.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module LLM::Anthropic::Response
+  module Enumerable
+    include ::Enumerable
+    def each(&)
+      return enum_for(:each) unless block_given?
+      data.each { yield(_1) }
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/file.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module LLM::Anthropic::Response
+  module File
+    ##
+    # Always return true
+    # @return [Boolean]
+    def file? = true
+
+    ##
+    # Returns the file type referenced by a prompt
+    # @return [Symbol]
+    def file_type
+      if mime_type.start_with?("image/")
+        :image
+      elsif mime_type == "text/plain" || mime_type == "application/pdf"
+        :document
+      else
+        :container_upload
+      end
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/response/web_search.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module LLM::Anthropic::Response
+  ##
+  # The {LLM::Anthropic::Response::WebSearch LLM::Anthropic::Response::WebSearch}
+  # module provides methods for accessing web search results from a web search
+  # tool call made via the {LLM::Provider#web_search LLM::Provider#web_search}
+  # method.
+  module WebSearch
+    ##
+    # Returns one or more search results
+    # @return [Array<LLM::Object>]
+    def search_results
+      LLM::Object.from_hash(
+        content
+          .select { _1["type"] == "web_search_tool_result" }
+          .flat_map { |n| n.content.map { _1.slice(:title, :url) } }
+      )
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic/stream_parser.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+class LLM::Anthropic
+  ##
+  # @private
+  class StreamParser
+    ##
+    # Returns the fully constructed response body
+    # @return [LLM::Object]
+    attr_reader :body
+
+    ##
+    # @param [#<<] io An IO-like object
+    # @return [LLM::Anthropic::StreamParser]
+    def initialize(io)
+      @body = LLM::Object.new(role: "assistant", content: [])
+      @io = io
+    end
+
+    ##
+    # @param [Hash] chunk
+    # @return [LLM::Anthropic::StreamParser]
+    def parse!(chunk)
+      tap { merge!(chunk) }
+    end
+
+    private
+
+    def merge!(chunk)
+      if chunk["type"] == "message_start"
+        merge_message!(chunk["message"])
+      elsif chunk["type"] == "content_block_start"
+        @body["content"][chunk["index"]] = chunk["content_block"]
+      elsif chunk["type"] == "content_block_delta"
+        if chunk["delta"]["type"] == "text_delta"
+          @body.content[chunk["index"]]["text"] << chunk["delta"]["text"]
+          @io << chunk["delta"]["text"] if @io.respond_to?(:<<)
+        elsif chunk["delta"]["type"] == "input_json_delta"
+          content = @body.content[chunk["index"]]
+          if Hash === content["input"]
+            content["input"] = chunk["delta"]["partial_json"]
+          else
+            content["input"] << chunk["delta"]["partial_json"]
+          end
+        end
+      elsif chunk["type"] == "message_delta"
+        merge_message!(chunk["delta"])
+      elsif chunk["type"] == "content_block_stop"
+        content = @body.content[chunk["index"]]
+        if content["input"]
+          content["input"] = JSON.parse(content["input"])
+        end
+      end
+    end

+    def merge_message!(message)
+      message.each do |key, value|
+        @body[key] = if value.respond_to?(:each_pair)
+          merge_message!(value)
+        else
+          value
+        end
+      end
+    end
+  end
+end
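
For context (not part of the diff), a minimal sketch of how this stream parser comes into play: any IO-like object passed via the stream: option receives text deltas as they arrive, since merge! writes each text_delta chunk to the given IO while the full response body is still accumulated.

    # Hypothetical usage sketch, not part of the packaged code.
    llm = LLM.anthropic(key: ENV["KEY"])
    # Text deltas are echoed to $stdout as they stream in; the complete
    # response object is still returned once the stream finishes.
    res = llm.complete("Write a haiku about Ruby", stream: $stdout)
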
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/anthropic.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+module LLM
+  ##
+  # The Anthropic class implements a provider for
+  # [Anthropic](https://www.anthropic.com).
+  #
+  # @example
+  #   #!/usr/bin/env ruby
+  #   require "llm"
+  #
+  #   llm = LLM.anthropic(key: ENV["KEY"])
+  #   bot = LLM::Bot.new(llm)
+  #   bot.chat ["Tell me about this photo", File.open("/images/dog.jpg", "rb")]
+  #   bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
+  class Anthropic < Provider
+    require_relative "anthropic/response/completion"
+    require_relative "anthropic/response/web_search"
+    require_relative "anthropic/format"
+    require_relative "anthropic/error_handler"
+    require_relative "anthropic/stream_parser"
+    require_relative "anthropic/files"
+    require_relative "anthropic/models"
+    include Format
+
+    HOST = "api.anthropic.com"
+
+    ##
+    # @param key (see LLM::Provider#initialize)
+    def initialize(**)
+      super(host: HOST, **)
+    end
+
+    ##
+    # Provides an interface to the chat completions API
+    # @see https://docs.anthropic.com/en/api/messages Anthropic docs
+    # @param prompt (see LLM::Provider#complete)
+    # @param params (see LLM::Provider#complete)
+    # @example (see LLM::Provider#complete)
+    # @raise (see LLM::Provider#request)
+    # @raise [LLM::PromptError]
+    #  When given an object a provider does not understand
+    # @return (see LLM::Provider#complete)
+    def complete(prompt, params = {})
+      params = {role: :user, model: default_model, max_tokens: 1024}.merge!(params)
+      tools = resolve_tools(params.delete(:tools))
+      params = [params, format_tools(tools)].inject({}, &:merge!).compact
+      role, stream = params.delete(:role), params.delete(:stream)
+      params[:stream] = true if stream.respond_to?(:<<) || stream == true
+      req = Net::HTTP::Post.new("/v1/messages", headers)
+      messages = [*(params.delete(:messages) || []), Message.new(role, prompt)]
+      body = JSON.dump({messages: [format(messages)].flatten}.merge!(params))
+      set_body_stream(req, StringIO.new(body))
+      res = execute(request: req, stream:)
+      LLM::Response.new(res)
+        .extend(LLM::Anthropic::Response::Completion)
+        .extend(Module.new { define_method(:__tools__) { tools } })
+    end
+
+    ##
+    # Provides an interface to Anthropic's models API
+    # @see https://docs.anthropic.com/en/api/models-list
+    # @return [LLM::Anthropic::Models]
+    def models
+      LLM::Anthropic::Models.new(self)
+    end
+
+    ##
+    # Provides an interface to Anthropic's files API
+    # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
+    # @return [LLM::Anthropic::Files]
+    def files
+      LLM::Anthropic::Files.new(self)
+    end
+
+    ##
+    # @return (see LLM::Provider#assistant_role)
+    def assistant_role
+      "assistant"
+    end
+
+    ##
+    # Returns the default model for chat completions
+    # @see https://docs.anthropic.com/en/docs/about-claude/models/all-models#model-comparison-table claude-sonnet-4-20250514
+    # @return [String]
+    def default_model
+      "claude-sonnet-4-20250514"
+    end
+
+    ##
+    # @note
+    #  This method includes certain tools that require configuration
+    #  through a set of options that are easier to set through the
+    #  {LLM::Provider#server_tool LLM::Provider#server_tool} method.
+    # @see https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/web-search-tool Anthropic docs
+    # @return (see LLM::Provider#server_tools)
+    def server_tools
+      {
+        bash: server_tool(:bash, type: "bash_20250124"),
+        web_search: server_tool(:web_search, type: "web_search_20250305", max_uses: 5),
+        text_editor: server_tool(:str_replace_based_edit_tool, type: "text_editor_20250728", max_characters: 10_000)
+      }
+    end
+
+    ##
+    # A convenience method for performing a web search using the
+    # Anthropic web search tool.
+    # @example
+    #   llm = LLM.anthropic(key: ENV["KEY"])
+    #   res = llm.web_search(query: "summarize today's news")
+    #   res.search_results.each { |item| print item.title, ": ", item.url, "\n" }
+    # @param query [String] The search query.
+    # @return [LLM::Response] The response from the LLM provider.
+    def web_search(query:)
+      complete(query, tools: [server_tools[:web_search]])
+        .extend(LLM::Anthropic::Response::WebSearch)
+    end
+
+    private
+
+    def headers
+      (@headers || {}).merge(
+        "Content-Type" => "application/json",
+        "x-api-key" => @key,
+        "anthropic-version" => "2023-06-01",
+        "anthropic-beta" => "files-api-2025-04-14"
+      )
+    end
+
+    def stream_parser
+      LLM::Anthropic::StreamParser
+    end
+
+    def error_handler
+      LLM::Anthropic::ErrorHandler
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format/completion_format.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module LLM::DeepSeek::Format
+  ##
+  # @private
+  class CompletionFormat
+    ##
+    # @param [LLM::Message, Hash] message
+    #  The message to format
+    def initialize(message)
+      @message = message
+    end
+
+    ##
+    # Formats the message for the DeepSeek chat completions API
+    # @return [Hash]
+    def format
+      catch(:abort) do
+        if Hash === message
+          {role: message[:role], content: format_content(message[:content])}
+        elsif message.tool_call?
+          {role: message.role, content: nil, tool_calls: message.extra[:original_tool_calls]}
+        else
+          format_message
+        end
+      end
+    end
+
+    private
+
+    def format_content(content)
+      case content
+      when String
+        content.to_s
+      when LLM::Message
+        format_content(content.content)
+      when LLM::Function::Return
+        throw(:abort, {role: "tool", tool_call_id: content.id, content: JSON.dump(content.value)})
+      else
+        raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
+                                "is not supported by the DeepSeek chat completions API"
+      end
+    end
+
+    def format_message
+      case content
+      when Array
+        format_array
+      else
+        {role: message.role, content: format_content(content)}
+      end
+    end
+
+    def format_array
+      if content.empty?
+        nil
+      elsif returns.any?
+        returns.map { {role: "tool", tool_call_id: _1.id, content: JSON.dump(_1.value)} }
+      else
+        {role: message.role, content: content.flat_map { format_content(_1) }}
+      end
+    end
+
+    def message = @message
+    def content = message.content
+    def returns = content.grep(LLM::Function::Return)
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek/format.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class LLM::DeepSeek
+  ##
+  # @private
+  module Format
+    require_relative "format/completion_format"
+    ##
+    # @param [Array<LLM::Message>] messages
+    #  The messages to format
+    # @return [Array<Hash>]
+    def format(messages, ...)
+      messages.filter_map do |message|
+        CompletionFormat.new(message).format
+      end
+    end
+
+    private
+
+    ##
+    # @param [Hash] params
+    # @return [Hash]
+    def format_tools(tools)
+      (tools.nil? || tools.empty?) ? {} : {tools: tools.map { _1.format(self) }}
+    end
+  end
+end
data/lib/llm/shell/internal/llm.rb/lib/llm/providers/deepseek.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require_relative "openai" unless defined?(LLM::OpenAI)
+
+module LLM
+  ##
+  # The DeepSeek class implements a provider for
+  # [DeepSeek](https://deepseek.com)
+  # through its OpenAI-compatible API available via
+  # their [web platform](https://platform.deepseek.com).
+  #
+  # @example
+  #   #!/usr/bin/env ruby
+  #   require "llm"
+  #
+  #   llm = LLM.deepseek(key: ENV["KEY"])
+  #   bot = LLM::Bot.new(llm)
+  #   bot.chat ["Tell me about this photo", File.open("/images/cat.jpg", "rb")]
+  #   bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
+  class DeepSeek < OpenAI
+    require_relative "deepseek/format"
+    include DeepSeek::Format
+
+    ##
+    # @param (see LLM::Provider#initialize)
+    # @return [LLM::DeepSeek]
+    def initialize(host: "api.deepseek.com", port: 443, ssl: true, **)
+      super
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def files
+      raise NotImplementedError
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def images
+      raise NotImplementedError
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def audio
+      raise NotImplementedError
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def moderations
+      raise NotImplementedError
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def responses
+      raise NotImplementedError
+    end
+
+    ##
+    # @raise [NotImplementedError]
+    def vector_stores
+      raise NotImplementedError
+    end
+
+    ##
+    # Returns the default model for chat completions
+    # @see https://api-docs.deepseek.com/quick_start/pricing deepseek-chat
+    # @return [String]
+    def default_model
+      "deepseek-chat"
+    end
+  end
+end
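
Taken together, the vendored DeepSeek provider is a thin subclass of the OpenAI provider: chat completions go through the OpenAI-compatible endpoint with "deepseek-chat" as the default model, while the OpenAI-only endpoints raise NotImplementedError. A minimal sketch (not part of the diff):

    # Hypothetical usage sketch, not part of the packaged code.
    llm = LLM.deepseek(key: ENV["KEY"])
    llm.default_model            #=> "deepseek-chat"
    res = llm.complete("Hello")  # served by the OpenAI-compatible chat API
    llm.images                   # raises NotImplementedError
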