llm.rb 0.11.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +0 -0
- data/README.md +56 -19
- data/lib/llm/bot/builder.rb +0 -0
- data/lib/llm/bot/conversable.rb +12 -4
- data/lib/llm/bot/prompt/completion.rb +18 -0
- data/lib/llm/bot/prompt/respond.rb +9 -0
- data/lib/llm/bot.rb +10 -17
- data/lib/llm/buffer.rb +15 -1
- data/lib/llm/error.rb +4 -0
- data/lib/llm/{event_handler.rb → eventhandler.rb} +0 -0
- data/lib/llm/eventstream/event.rb +0 -0
- data/lib/llm/eventstream/parser.rb +0 -0
- data/lib/llm/eventstream.rb +0 -0
- data/lib/llm/file.rb +4 -3
- data/lib/llm/function.rb +4 -4
- data/lib/llm/message.rb +0 -0
- data/lib/llm/mime.rb +88 -6
- data/lib/llm/multipart.rb +0 -1
- data/lib/llm/object/builder.rb +0 -0
- data/lib/llm/object/kernel.rb +0 -0
- data/lib/llm/object.rb +2 -3
- data/lib/llm/provider.rb +3 -3
- data/lib/llm/providers/anthropic/error_handler.rb +2 -0
- data/lib/llm/providers/anthropic/format/completion_format.rb +0 -0
- data/lib/llm/providers/anthropic/format.rb +0 -0
- data/lib/llm/providers/anthropic/models.rb +2 -2
- data/lib/llm/providers/anthropic/response/completion.rb +0 -0
- data/lib/llm/providers/anthropic/stream_parser.rb +0 -0
- data/lib/llm/providers/anthropic.rb +10 -1
- data/lib/llm/providers/deepseek/format/completion_format.rb +0 -0
- data/lib/llm/providers/deepseek/format.rb +0 -0
- data/lib/llm/providers/deepseek.rb +10 -1
- data/lib/llm/providers/gemini/audio.rb +3 -3
- data/lib/llm/providers/gemini/error_handler.rb +2 -0
- data/lib/llm/providers/gemini/files.rb +8 -20
- data/lib/llm/providers/gemini/format/completion_format.rb +2 -2
- data/lib/llm/providers/gemini/format.rb +0 -0
- data/lib/llm/providers/gemini/images.rb +4 -4
- data/lib/llm/providers/gemini/models.rb +2 -2
- data/lib/llm/providers/gemini/response/completion.rb +2 -0
- data/lib/llm/providers/gemini/response/embedding.rb +1 -1
- data/lib/llm/providers/gemini/response/file.rb +0 -0
- data/lib/llm/providers/gemini/response/image.rb +0 -0
- data/lib/llm/providers/gemini/stream_parser.rb +0 -0
- data/lib/llm/providers/gemini.rb +13 -21
- data/lib/llm/providers/llamacpp.rb +12 -1
- data/lib/llm/providers/ollama/error_handler.rb +2 -0
- data/lib/llm/providers/ollama/format/completion_format.rb +0 -0
- data/lib/llm/providers/ollama/format.rb +0 -0
- data/lib/llm/providers/ollama/models.rb +0 -0
- data/lib/llm/providers/ollama/response/completion.rb +0 -0
- data/lib/llm/providers/ollama/response/embedding.rb +1 -2
- data/lib/llm/providers/ollama/stream_parser.rb +0 -0
- data/lib/llm/providers/ollama.rb +8 -11
- data/lib/llm/providers/openai/audio.rb +4 -4
- data/lib/llm/providers/openai/error_handler.rb +13 -1
- data/lib/llm/providers/openai/files.rb +8 -19
- data/lib/llm/providers/openai/format/completion_format.rb +0 -0
- data/lib/llm/providers/openai/format/moderation_format.rb +0 -0
- data/lib/llm/providers/openai/format/respond_format.rb +0 -0
- data/lib/llm/providers/openai/format.rb +0 -0
- data/lib/llm/providers/openai/images.rb +10 -10
- data/lib/llm/providers/openai/models.rb +2 -2
- data/lib/llm/providers/openai/moderations.rb +0 -0
- data/lib/llm/providers/openai/response/audio.rb +0 -0
- data/lib/llm/providers/openai/response/completion.rb +2 -2
- data/lib/llm/providers/openai/response/embedding.rb +3 -3
- data/lib/llm/providers/openai/response/file.rb +0 -0
- data/lib/llm/providers/openai/response/image.rb +0 -0
- data/lib/llm/providers/openai/response/moderations.rb +0 -0
- data/lib/llm/providers/openai/response/responds.rb +0 -1
- data/lib/llm/providers/openai/responses.rb +6 -25
- data/lib/llm/providers/openai/stream_parser.rb +1 -0
- data/lib/llm/providers/openai/vector_stores.rb +85 -3
- data/lib/llm/providers/openai.rb +10 -1
- data/lib/llm/providers/xai/images.rb +58 -0
- data/lib/llm/providers/xai.rb +72 -0
- data/lib/llm/response.rb +5 -0
- data/lib/llm/{json/schema → schema}/array.rb +3 -3
- data/lib/llm/{json/schema → schema}/boolean.rb +3 -3
- data/lib/llm/{json/schema → schema}/integer.rb +6 -6
- data/lib/llm/{json/schema → schema}/leaf.rb +9 -9
- data/lib/llm/{json/schema → schema}/null.rb +3 -3
- data/lib/llm/{json/schema → schema}/number.rb +6 -6
- data/lib/llm/{json/schema → schema}/object.rb +3 -3
- data/lib/llm/{json/schema → schema}/string.rb +5 -5
- data/lib/llm/{json/schema → schema}/version.rb +1 -1
- data/lib/llm/{json/schema.rb → schema.rb} +10 -13
- data/lib/llm/utils.rb +0 -0
- data/lib/llm/version.rb +1 -1
- data/lib/llm.rb +11 -2
- data/llm.gemspec +4 -4
- metadata +22 -20
data/lib/llm.rb
CHANGED
@@ -2,7 +2,7 @@
|
|
2
2
|
|
3
3
|
module LLM
|
4
4
|
require "stringio"
|
5
|
-
require_relative "llm/json/schema"
|
5
|
+
require_relative "llm/schema"
|
6
6
|
require_relative "llm/object"
|
7
7
|
require_relative "llm/version"
|
8
8
|
require_relative "llm/utils"
|
@@ -17,7 +17,7 @@ module LLM
|
|
17
17
|
require_relative "llm/buffer"
|
18
18
|
require_relative "llm/function"
|
19
19
|
require_relative "llm/eventstream"
|
20
|
-
require_relative "llm/event_handler"
|
20
|
+
require_relative "llm/eventhandler"
|
21
21
|
|
22
22
|
module_function
|
23
23
|
|
@@ -69,6 +69,15 @@ module LLM
|
|
69
69
|
LLM::OpenAI.new(**)
|
70
70
|
end
|
71
71
|
|
72
|
+
##
|
73
|
+
# @param key (see LLM::XAI#initialize)
|
74
|
+
# @param host (see LLM::XAI#initialize)
|
75
|
+
# @return (see LLM::XAI#initialize)
|
76
|
+
def xai(**)
|
77
|
+
require_relative "llm/providers/xai" unless defined?(LLM::XAI)
|
78
|
+
LLM::XAI.new(**)
|
79
|
+
end
|
80
|
+
|
72
81
|
##
|
73
82
|
# Define a function
|
74
83
|
# @example
|
data/llm.gemspec
CHANGED
@@ -10,9 +10,9 @@ Gem::Specification.new do |spec|
|
|
10
10
|
|
11
11
|
spec.summary = <<~SUMMARY
|
12
12
|
llm.rb is a zero-dependency Ruby toolkit for Large Language Models that
|
13
|
-
includes OpenAI, Gemini, Anthropic, DeepSeek, Ollama, and
|
14
|
-
toolkit includes full support for chat, streaming, tool calling,
|
15
|
-
images, files, and JSON Schema
|
13
|
+
includes OpenAI, Gemini, Anthropic, xAI (grok), DeepSeek, Ollama, and
|
14
|
+
LlamaCpp. The toolkit includes full support for chat, streaming, tool calling,
|
15
|
+
audio, images, files, and structured outputs (JSON Schema).
|
16
16
|
SUMMARY
|
17
17
|
|
18
18
|
spec.description = spec.summary
|
@@ -37,7 +37,7 @@ Gem::Specification.new do |spec|
|
|
37
37
|
spec.add_development_dependency "test-cmd.rb", "~> 0.12.0"
|
38
38
|
spec.add_development_dependency "rake", "~> 13.0"
|
39
39
|
spec.add_development_dependency "rspec", "~> 3.0"
|
40
|
-
spec.add_development_dependency "standard", "~> 1.
|
40
|
+
spec.add_development_dependency "standard", "~> 1.50"
|
41
41
|
spec.add_development_dependency "vcr", "~> 6.0"
|
42
42
|
spec.add_development_dependency "dotenv", "~> 2.8"
|
43
43
|
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: llm.rb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.11.0
|
4
|
+
version: 0.13.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Antar Azri
|
@@ -114,14 +114,14 @@ dependencies:
|
|
114
114
|
requirements:
|
115
115
|
- - "~>"
|
116
116
|
- !ruby/object:Gem::Version
|
117
|
-
version: '1.
|
117
|
+
version: '1.50'
|
118
118
|
type: :development
|
119
119
|
prerelease: false
|
120
120
|
version_requirements: !ruby/object:Gem::Requirement
|
121
121
|
requirements:
|
122
122
|
- - "~>"
|
123
123
|
- !ruby/object:Gem::Version
|
124
|
-
version: '1.
|
124
|
+
version: '1.50'
|
125
125
|
- !ruby/object:Gem::Dependency
|
126
126
|
name: vcr
|
127
127
|
requirement: !ruby/object:Gem::Requirement
|
@@ -151,9 +151,9 @@ dependencies:
|
|
151
151
|
- !ruby/object:Gem::Version
|
152
152
|
version: '2.8'
|
153
153
|
description: llm.rb is a zero-dependency Ruby toolkit for Large Language Models that
|
154
|
-
includes OpenAI, Gemini, Anthropic, DeepSeek, Ollama, and LlamaCpp.
|
155
|
-
includes full support for chat, streaming, tool calling, audio, images,
|
156
|
-
JSON Schema
|
154
|
+
includes OpenAI, Gemini, Anthropic, xAI (grok), DeepSeek, Ollama, and LlamaCpp.
|
155
|
+
The toolkit includes full support for chat, streaming, tool calling, audio, images,
|
156
|
+
files, and structured outputs (JSON Schema).
|
157
157
|
email:
|
158
158
|
- azantar@proton.me
|
159
159
|
- 0x1eef@proton.me
|
@@ -171,22 +171,12 @@ files:
|
|
171
171
|
- lib/llm/bot/prompt/respond.rb
|
172
172
|
- lib/llm/buffer.rb
|
173
173
|
- lib/llm/error.rb
|
174
|
-
- lib/llm/event_handler.rb
|
174
|
+
- lib/llm/eventhandler.rb
|
175
175
|
- lib/llm/eventstream.rb
|
176
176
|
- lib/llm/eventstream/event.rb
|
177
177
|
- lib/llm/eventstream/parser.rb
|
178
178
|
- lib/llm/file.rb
|
179
179
|
- lib/llm/function.rb
|
180
|
-
- lib/llm/json/schema.rb
|
181
|
-
- lib/llm/json/schema/array.rb
|
182
|
-
- lib/llm/json/schema/boolean.rb
|
183
|
-
- lib/llm/json/schema/integer.rb
|
184
|
-
- lib/llm/json/schema/leaf.rb
|
185
|
-
- lib/llm/json/schema/null.rb
|
186
|
-
- lib/llm/json/schema/number.rb
|
187
|
-
- lib/llm/json/schema/object.rb
|
188
|
-
- lib/llm/json/schema/string.rb
|
189
|
-
- lib/llm/json/schema/version.rb
|
190
180
|
- lib/llm/message.rb
|
191
181
|
- lib/llm/mime.rb
|
192
182
|
- lib/llm/multipart.rb
|
@@ -247,7 +237,19 @@ files:
|
|
247
237
|
- lib/llm/providers/openai/responses.rb
|
248
238
|
- lib/llm/providers/openai/stream_parser.rb
|
249
239
|
- lib/llm/providers/openai/vector_stores.rb
|
240
|
+
- lib/llm/providers/xai.rb
|
241
|
+
- lib/llm/providers/xai/images.rb
|
250
242
|
- lib/llm/response.rb
|
243
|
+
- lib/llm/schema.rb
|
244
|
+
- lib/llm/schema/array.rb
|
245
|
+
- lib/llm/schema/boolean.rb
|
246
|
+
- lib/llm/schema/integer.rb
|
247
|
+
- lib/llm/schema/leaf.rb
|
248
|
+
- lib/llm/schema/null.rb
|
249
|
+
- lib/llm/schema/number.rb
|
250
|
+
- lib/llm/schema/object.rb
|
251
|
+
- lib/llm/schema/string.rb
|
252
|
+
- lib/llm/schema/version.rb
|
251
253
|
- lib/llm/utils.rb
|
252
254
|
- lib/llm/version.rb
|
253
255
|
- llm.gemspec
|
@@ -274,7 +276,7 @@ requirements: []
|
|
274
276
|
rubygems_version: 3.6.9
|
275
277
|
specification_version: 4
|
276
278
|
summary: llm.rb is a zero-dependency Ruby toolkit for Large Language Models that includes
|
277
|
-
OpenAI, Gemini, Anthropic, DeepSeek, Ollama, and LlamaCpp. The toolkit
|
278
|
-
full support for chat, streaming, tool calling, audio, images, files, and
|
279
|
-
|
279
|
+
OpenAI, Gemini, Anthropic, xAI (grok), DeepSeek, Ollama, and LlamaCpp. The toolkit
|
280
|
+
includes full support for chat, streaming, tool calling, audio, images, files, and
|
281
|
+
structured outputs (JSON Schema).
|
280
282
|
test_files: []
|