llm.rb 4.7.0 → 4.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +335 -587
- data/data/anthropic.json +770 -0
- data/data/deepseek.json +75 -0
- data/data/google.json +1050 -0
- data/data/openai.json +1421 -0
- data/data/xai.json +792 -0
- data/data/zai.json +330 -0
- data/lib/llm/agent.rb +42 -41
- data/lib/llm/bot.rb +1 -263
- data/lib/llm/buffer.rb +7 -0
- data/lib/llm/{session → context}/deserializer.rb +4 -3
- data/lib/llm/context.rb +292 -0
- data/lib/llm/cost.rb +26 -0
- data/lib/llm/error.rb +8 -0
- data/lib/llm/eventstream/parser.rb +0 -5
- data/lib/llm/function/array.rb +61 -0
- data/lib/llm/function/fiber_group.rb +91 -0
- data/lib/llm/function/task_group.rb +89 -0
- data/lib/llm/function/thread_group.rb +94 -0
- data/lib/llm/function.rb +75 -10
- data/lib/llm/mcp/command.rb +108 -0
- data/lib/llm/mcp/error.rb +31 -0
- data/lib/llm/mcp/pipe.rb +82 -0
- data/lib/llm/mcp/rpc.rb +118 -0
- data/lib/llm/mcp/transport/stdio.rb +85 -0
- data/lib/llm/mcp.rb +102 -0
- data/lib/llm/message.rb +13 -11
- data/lib/llm/model.rb +115 -0
- data/lib/llm/prompt.rb +17 -7
- data/lib/llm/provider.rb +60 -32
- data/lib/llm/providers/anthropic/error_handler.rb +1 -1
- data/lib/llm/providers/anthropic/files.rb +3 -3
- data/lib/llm/providers/anthropic/models.rb +1 -1
- data/lib/llm/providers/anthropic/request_adapter.rb +20 -3
- data/lib/llm/providers/anthropic/response_adapter/models.rb +13 -0
- data/lib/llm/providers/anthropic/response_adapter.rb +2 -0
- data/lib/llm/providers/anthropic.rb +21 -5
- data/lib/llm/providers/deepseek.rb +10 -3
- data/lib/llm/providers/{gemini → google}/audio.rb +6 -6
- data/lib/llm/providers/{gemini → google}/error_handler.rb +20 -5
- data/lib/llm/providers/{gemini → google}/files.rb +11 -11
- data/lib/llm/providers/{gemini → google}/images.rb +7 -7
- data/lib/llm/providers/{gemini → google}/models.rb +5 -5
- data/lib/llm/providers/{gemini → google}/request_adapter/completion.rb +7 -3
- data/lib/llm/providers/{gemini → google}/request_adapter.rb +1 -1
- data/lib/llm/providers/{gemini → google}/response_adapter/completion.rb +7 -7
- data/lib/llm/providers/{gemini → google}/response_adapter/embedding.rb +1 -1
- data/lib/llm/providers/{gemini → google}/response_adapter/file.rb +1 -1
- data/lib/llm/providers/{gemini → google}/response_adapter/files.rb +1 -1
- data/lib/llm/providers/{gemini → google}/response_adapter/image.rb +1 -1
- data/lib/llm/providers/google/response_adapter/models.rb +13 -0
- data/lib/llm/providers/{gemini → google}/response_adapter/web_search.rb +2 -2
- data/lib/llm/providers/{gemini → google}/response_adapter.rb +8 -8
- data/lib/llm/providers/{gemini → google}/stream_parser.rb +3 -3
- data/lib/llm/providers/{gemini.rb → google.rb} +41 -26
- data/lib/llm/providers/llamacpp.rb +10 -3
- data/lib/llm/providers/ollama/error_handler.rb +1 -1
- data/lib/llm/providers/ollama/models.rb +1 -1
- data/lib/llm/providers/ollama/response_adapter/models.rb +13 -0
- data/lib/llm/providers/ollama/response_adapter.rb +2 -0
- data/lib/llm/providers/ollama.rb +19 -4
- data/lib/llm/providers/openai/error_handler.rb +18 -3
- data/lib/llm/providers/openai/files.rb +3 -3
- data/lib/llm/providers/openai/images.rb +17 -11
- data/lib/llm/providers/openai/models.rb +1 -1
- data/lib/llm/providers/openai/response_adapter/completion.rb +9 -1
- data/lib/llm/providers/openai/response_adapter/models.rb +13 -0
- data/lib/llm/providers/openai/response_adapter/responds.rb +9 -1
- data/lib/llm/providers/openai/response_adapter.rb +2 -0
- data/lib/llm/providers/openai/responses.rb +16 -1
- data/lib/llm/providers/openai/stream_parser.rb +2 -0
- data/lib/llm/providers/openai.rb +28 -6
- data/lib/llm/providers/xai/images.rb +7 -6
- data/lib/llm/providers/xai.rb +10 -3
- data/lib/llm/providers/zai.rb +9 -2
- data/lib/llm/registry.rb +81 -0
- data/lib/llm/schema/enum.rb +16 -0
- data/lib/llm/schema/parser.rb +109 -0
- data/lib/llm/schema.rb +5 -0
- data/lib/llm/server_tool.rb +5 -5
- data/lib/llm/session.rb +10 -1
- data/lib/llm/tool/param.rb +1 -1
- data/lib/llm/tool.rb +86 -5
- data/lib/llm/tracer/langsmith.rb +144 -0
- data/lib/llm/tracer/logger.rb +9 -1
- data/lib/llm/tracer/null.rb +8 -0
- data/lib/llm/tracer/telemetry.rb +98 -78
- data/lib/llm/tracer.rb +108 -4
- data/lib/llm/usage.rb +5 -0
- data/lib/llm/version.rb +1 -1
- data/lib/llm.rb +40 -6
- data/llm.gemspec +45 -8
- metadata +87 -28
- data/lib/llm/providers/gemini/response_adapter/models.rb +0 -15
data/lib/llm.rb
CHANGED
|
@@ -6,10 +6,13 @@ module LLM
|
|
|
6
6
|
require_relative "llm/tracer"
|
|
7
7
|
require_relative "llm/error"
|
|
8
8
|
require_relative "llm/contract"
|
|
9
|
+
require_relative "llm/registry"
|
|
10
|
+
require_relative "llm/cost"
|
|
9
11
|
require_relative "llm/usage"
|
|
10
12
|
require_relative "llm/prompt"
|
|
11
13
|
require_relative "llm/schema"
|
|
12
14
|
require_relative "llm/object"
|
|
15
|
+
require_relative "llm/model"
|
|
13
16
|
require_relative "llm/version"
|
|
14
17
|
require_relative "llm/utils"
|
|
15
18
|
require_relative "llm/message"
|
|
@@ -18,7 +21,7 @@ module LLM
|
|
|
18
21
|
require_relative "llm/multipart"
|
|
19
22
|
require_relative "llm/file"
|
|
20
23
|
require_relative "llm/provider"
|
|
21
|
-
require_relative "llm/
|
|
24
|
+
require_relative "llm/context"
|
|
22
25
|
require_relative "llm/agent"
|
|
23
26
|
require_relative "llm/buffer"
|
|
24
27
|
require_relative "llm/function"
|
|
@@ -29,7 +32,22 @@ module LLM
|
|
|
29
32
|
|
|
30
33
|
##
|
|
31
34
|
# Thread-safe monitors for different contexts
|
|
32
|
-
@monitors = {require: Monitor.new, clients: Monitor.new, inherited: Monitor.new}
|
|
35
|
+
@monitors = {require: Monitor.new, clients: Monitor.new, inherited: Monitor.new, registry: Monitor.new}
|
|
36
|
+
|
|
37
|
+
##
|
|
38
|
+
# Model registry
|
|
39
|
+
@registry = {}
|
|
40
|
+
|
|
41
|
+
##
|
|
42
|
+
# @param [Symbol, LLM::Provider] llm
|
|
43
|
+
# The name of a provider, or an instance of LLM::Provider
|
|
44
|
+
# @return [LLM::Object]
|
|
45
|
+
def self.registry_for(llm)
|
|
46
|
+
lock(:registry) do
|
|
47
|
+
name = Symbol === llm ? llm : llm.name
|
|
48
|
+
@registry[name] ||= Registry.for(name)
|
|
49
|
+
end
|
|
50
|
+
end
|
|
33
51
|
|
|
34
52
|
module_function
|
|
35
53
|
|
|
@@ -75,10 +93,10 @@ module LLM
|
|
|
75
93
|
|
|
76
94
|
##
|
|
77
95
|
# @param (see LLM::Provider#initialize)
|
|
78
|
-
# @return (see LLM::Gemini#initialize)
|
|
79
|
-
def gemini(**)
|
|
80
|
-
lock(:require) { require_relative "llm/providers/gemini" unless defined?(LLM::Gemini) }
|
|
81
|
-
LLM::Gemini.new(**)
|
|
96
|
+
# @return (see LLM::Google#initialize)
|
|
97
|
+
def google(**)
|
|
98
|
+
lock(:require) { require_relative "llm/providers/google" unless defined?(LLM::Google) }
|
|
99
|
+
LLM::Google.new(**)
|
|
82
100
|
end
|
|
83
101
|
|
|
84
102
|
##
|
|
@@ -131,6 +149,22 @@ module LLM
|
|
|
131
149
|
LLM::ZAI.new(**)
|
|
132
150
|
end
|
|
133
151
|
|
|
152
|
+
##
|
|
153
|
+
# @param [LLM::Provider, nil] llm
|
|
154
|
+
# The provider to use for MCP transports that need one
|
|
155
|
+
# @param [Hash, nil] stdio
|
|
156
|
+
# @option stdio [Array<String>] :argv
|
|
157
|
+
# The command to run for the MCP process
|
|
158
|
+
# @option stdio [Hash] :env
|
|
159
|
+
# The environment variables to set for the MCP process
|
|
160
|
+
# @option stdio [String, nil] :cwd
|
|
161
|
+
# The working directory for the MCP process
|
|
162
|
+
# @return [LLM::MCP]
|
|
163
|
+
def mcp(llm = nil, **)
|
|
164
|
+
lock(:require) { require_relative "llm/mcp" unless defined?(LLM::MCP) }
|
|
165
|
+
LLM::MCP.new(llm, **)
|
|
166
|
+
end
|
|
167
|
+
|
|
134
168
|
##
|
|
135
169
|
# Define a function
|
|
136
170
|
# @example
|
data/llm.gemspec
CHANGED
|
@@ -5,17 +5,52 @@ require_relative "lib/llm/version"
|
|
|
5
5
|
Gem::Specification.new do |spec|
|
|
6
6
|
spec.name = "llm.rb"
|
|
7
7
|
spec.version = LLM::VERSION
|
|
8
|
-
spec.authors = ["Antar Azri", "0x1eef"]
|
|
9
|
-
spec.email = ["azantar@proton.me", "0x1eef@
|
|
8
|
+
spec.authors = ["Antar Azri", "0x1eef", "Christos Maris", "Rodrigo Serrano"]
|
|
9
|
+
spec.email = ["azantar@proton.me", "0x1eef@hardenedbsd.org"]
|
|
10
10
|
|
|
11
11
|
spec.summary = <<~SUMMARY
|
|
12
|
-
llm.rb is a
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
12
|
+
llm.rb is a Ruby-centric toolkit for building real LLM-powered systems — where
|
|
13
|
+
LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
14
|
+
control over contexts, tools, concurrency, and providers, so you can compose
|
|
15
|
+
reliable, production-ready workflows without hidden abstractions.
|
|
16
16
|
SUMMARY
|
|
17
17
|
|
|
18
|
-
spec.description =
|
|
18
|
+
spec.description = <<~DESCRIPTION
|
|
19
|
+
llm.rb is a Ruby-centric toolkit for building real LLM-powered systems — where
|
|
20
|
+
LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
21
|
+
control over contexts, tools, concurrency, and providers, so you can compose
|
|
22
|
+
reliable, production-ready workflows without hidden abstractions.
|
|
23
|
+
|
|
24
|
+
Built for engineers who want to understand and control their LLM systems. No
|
|
25
|
+
frameworks, no hidden magic — just composable primitives for building real
|
|
26
|
+
applications, from scripts to full systems like Relay.
|
|
27
|
+
|
|
28
|
+
## Key Features
|
|
29
|
+
|
|
30
|
+
- **Contexts are central** — Hold history, tools, schema, usage, cost, persistence, and execution state
|
|
31
|
+
- **Tool execution is explicit** — Run local, provider-native, and MCP tools sequentially or concurrently
|
|
32
|
+
- **One API across providers** — Unified interface for OpenAI, Anthropic, Google, xAI, zAI, DeepSeek, Ollama, and LlamaCpp
|
|
33
|
+
- **Thread-safe where it matters** — Providers are shareable, while contexts stay isolated and stateful
|
|
34
|
+
- **Production-ready** — Cost tracking, observability, persistence, and performance tuning built in
|
|
35
|
+
- **Stdlib-only by default** — Runs on Ruby standard library, with optional features loaded only when used
|
|
36
|
+
|
|
37
|
+
## Capabilities
|
|
38
|
+
|
|
39
|
+
- Chat & Contexts with persistence
|
|
40
|
+
- Streaming responses
|
|
41
|
+
- Tool calling with JSON Schema validation
|
|
42
|
+
- Concurrent execution (threads, fibers, async tasks)
|
|
43
|
+
- Agents with auto-execution
|
|
44
|
+
- Structured outputs
|
|
45
|
+
- MCP (Model Context Protocol) support
|
|
46
|
+
- Multimodal inputs (text, images, audio, documents)
|
|
47
|
+
- Audio generation, transcription, translation
|
|
48
|
+
- Image generation and editing
|
|
49
|
+
- Files API for document processing
|
|
50
|
+
- Embeddings and vector stores
|
|
51
|
+
- Local model registry for capabilities, limits, and pricing
|
|
52
|
+
DESCRIPTION
|
|
53
|
+
|
|
19
54
|
spec.license = "0BSD"
|
|
20
55
|
spec.required_ruby_version = ">= 3.2.0"
|
|
21
56
|
|
|
@@ -23,10 +58,12 @@ Gem::Specification.new do |spec|
|
|
|
23
58
|
spec.metadata["homepage_uri"] = "https://github.com/llmrb/llm.rb"
|
|
24
59
|
spec.metadata["source_code_uri"] = "https://github.com/llmrb/llm.rb"
|
|
25
60
|
spec.metadata["documentation_uri"] = "https://0x1eef.github.io/x/llm.rb"
|
|
61
|
+
spec.metadata["changelog_uri"] = "https://0x1eef.github.io/x/llm.rb/file.CHANGELOG.html"
|
|
26
62
|
|
|
27
63
|
spec.files = Dir[
|
|
28
64
|
"README.md", "LICENSE",
|
|
29
65
|
"lib/*.rb", "lib/**/*.rb",
|
|
66
|
+
"data/*.json",
|
|
30
67
|
"llm.gemspec"
|
|
31
68
|
]
|
|
32
69
|
spec.require_paths = ["lib"]
|
|
@@ -44,4 +81,4 @@ Gem::Specification.new do |spec|
|
|
|
44
81
|
spec.add_development_dependency "net-http-persistent", "~> 4.0"
|
|
45
82
|
spec.add_development_dependency "opentelemetry-sdk", "~> 1.10"
|
|
46
83
|
spec.add_development_dependency "logger", "~> 1.7"
|
|
47
|
-
end
|
|
84
|
+
end
|
metadata
CHANGED
|
@@ -1,11 +1,13 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: llm.rb
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 4.7.0
|
|
4
|
+
version: 4.9.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Antar Azri
|
|
8
8
|
- '0x1eef'
|
|
9
|
+
- Christos Maris
|
|
10
|
+
- Rodrigo Serrano
|
|
9
11
|
bindir: bin
|
|
10
12
|
cert_chain: []
|
|
11
13
|
date: 1980-01-02 00:00:00.000000000 Z
|
|
@@ -192,26 +194,65 @@ dependencies:
|
|
|
192
194
|
- - "~>"
|
|
193
195
|
- !ruby/object:Gem::Version
|
|
194
196
|
version: '1.7'
|
|
195
|
-
description:
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
197
|
+
description: |
|
|
198
|
+
llm.rb is a Ruby-centric toolkit for building real LLM-powered systems — where
|
|
199
|
+
LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
200
|
+
control over contexts, tools, concurrency, and providers, so you can compose
|
|
201
|
+
reliable, production-ready workflows without hidden abstractions.
|
|
202
|
+
|
|
203
|
+
Built for engineers who want to understand and control their LLM systems. No
|
|
204
|
+
frameworks, no hidden magic — just composable primitives for building real
|
|
205
|
+
applications, from scripts to full systems like Relay.
|
|
206
|
+
|
|
207
|
+
## Key Features
|
|
208
|
+
|
|
209
|
+
- **Contexts are central** — Hold history, tools, schema, usage, cost, persistence, and execution state
|
|
210
|
+
- **Tool execution is explicit** — Run local, provider-native, and MCP tools sequentially or concurrently
|
|
211
|
+
- **One API across providers** — Unified interface for OpenAI, Anthropic, Google, xAI, zAI, DeepSeek, Ollama, and LlamaCpp
|
|
212
|
+
- **Thread-safe where it matters** — Providers are shareable, while contexts stay isolated and stateful
|
|
213
|
+
- **Production-ready** — Cost tracking, observability, persistence, and performance tuning built in
|
|
214
|
+
- **Stdlib-only by default** — Runs on Ruby standard library, with optional features loaded only when used
|
|
215
|
+
|
|
216
|
+
## Capabilities
|
|
217
|
+
|
|
218
|
+
- Chat & Contexts with persistence
|
|
219
|
+
- Streaming responses
|
|
220
|
+
- Tool calling with JSON Schema validation
|
|
221
|
+
- Concurrent execution (threads, fibers, async tasks)
|
|
222
|
+
- Agents with auto-execution
|
|
223
|
+
- Structured outputs
|
|
224
|
+
- MCP (Model Context Protocol) support
|
|
225
|
+
- Multimodal inputs (text, images, audio, documents)
|
|
226
|
+
- Audio generation, transcription, translation
|
|
227
|
+
- Image generation and editing
|
|
228
|
+
- Files API for document processing
|
|
229
|
+
- Embeddings and vector stores
|
|
230
|
+
- Local model registry for capabilities, limits, and pricing
|
|
199
231
|
email:
|
|
200
232
|
- azantar@proton.me
|
|
201
|
-
- 0x1eef@
|
|
233
|
+
- 0x1eef@hardenedbsd.org
|
|
202
234
|
executables: []
|
|
203
235
|
extensions: []
|
|
204
236
|
extra_rdoc_files: []
|
|
205
237
|
files:
|
|
206
238
|
- LICENSE
|
|
207
239
|
- README.md
|
|
240
|
+
- data/anthropic.json
|
|
241
|
+
- data/deepseek.json
|
|
242
|
+
- data/google.json
|
|
243
|
+
- data/openai.json
|
|
244
|
+
- data/xai.json
|
|
245
|
+
- data/zai.json
|
|
208
246
|
- lib/llm.rb
|
|
209
247
|
- lib/llm/agent.rb
|
|
210
248
|
- lib/llm/bot.rb
|
|
211
249
|
- lib/llm/buffer.rb
|
|
212
250
|
- lib/llm/client.rb
|
|
251
|
+
- lib/llm/context.rb
|
|
252
|
+
- lib/llm/context/deserializer.rb
|
|
213
253
|
- lib/llm/contract.rb
|
|
214
254
|
- lib/llm/contract/completion.rb
|
|
255
|
+
- lib/llm/cost.rb
|
|
215
256
|
- lib/llm/error.rb
|
|
216
257
|
- lib/llm/eventhandler.rb
|
|
217
258
|
- lib/llm/eventstream.rb
|
|
@@ -219,10 +260,21 @@ files:
|
|
|
219
260
|
- lib/llm/eventstream/parser.rb
|
|
220
261
|
- lib/llm/file.rb
|
|
221
262
|
- lib/llm/function.rb
|
|
263
|
+
- lib/llm/function/array.rb
|
|
264
|
+
- lib/llm/function/fiber_group.rb
|
|
265
|
+
- lib/llm/function/task_group.rb
|
|
266
|
+
- lib/llm/function/thread_group.rb
|
|
222
267
|
- lib/llm/function/tracing.rb
|
|
223
268
|
- lib/llm/json_adapter.rb
|
|
269
|
+
- lib/llm/mcp.rb
|
|
270
|
+
- lib/llm/mcp/command.rb
|
|
271
|
+
- lib/llm/mcp/error.rb
|
|
272
|
+
- lib/llm/mcp/pipe.rb
|
|
273
|
+
- lib/llm/mcp/rpc.rb
|
|
274
|
+
- lib/llm/mcp/transport/stdio.rb
|
|
224
275
|
- lib/llm/message.rb
|
|
225
276
|
- lib/llm/mime.rb
|
|
277
|
+
- lib/llm/model.rb
|
|
226
278
|
- lib/llm/multipart.rb
|
|
227
279
|
- lib/llm/multipart/enumerator_io.rb
|
|
228
280
|
- lib/llm/object.rb
|
|
@@ -240,28 +292,29 @@ files:
|
|
|
240
292
|
- lib/llm/providers/anthropic/response_adapter/completion.rb
|
|
241
293
|
- lib/llm/providers/anthropic/response_adapter/enumerable.rb
|
|
242
294
|
- lib/llm/providers/anthropic/response_adapter/file.rb
|
|
295
|
+
- lib/llm/providers/anthropic/response_adapter/models.rb
|
|
243
296
|
- lib/llm/providers/anthropic/response_adapter/web_search.rb
|
|
244
297
|
- lib/llm/providers/anthropic/stream_parser.rb
|
|
245
298
|
- lib/llm/providers/deepseek.rb
|
|
246
299
|
- lib/llm/providers/deepseek/request_adapter.rb
|
|
247
300
|
- lib/llm/providers/deepseek/request_adapter/completion.rb
|
|
248
|
-
- lib/llm/providers/gemini.rb
|
|
249
|
-
- lib/llm/providers/gemini/audio.rb
|
|
250
|
-
- lib/llm/providers/gemini/error_handler.rb
|
|
251
|
-
- lib/llm/providers/gemini/files.rb
|
|
252
|
-
- lib/llm/providers/gemini/images.rb
|
|
253
|
-
- lib/llm/providers/gemini/models.rb
|
|
254
|
-
- lib/llm/providers/gemini/request_adapter.rb
|
|
255
|
-
- lib/llm/providers/gemini/request_adapter/completion.rb
|
|
256
|
-
- lib/llm/providers/gemini/response_adapter.rb
|
|
257
|
-
- lib/llm/providers/gemini/response_adapter/completion.rb
|
|
258
|
-
- lib/llm/providers/gemini/response_adapter/embedding.rb
|
|
259
|
-
- lib/llm/providers/gemini/response_adapter/file.rb
|
|
260
|
-
- lib/llm/providers/gemini/response_adapter/files.rb
|
|
261
|
-
- lib/llm/providers/gemini/response_adapter/image.rb
|
|
262
|
-
- lib/llm/providers/gemini/response_adapter/models.rb
|
|
263
|
-
- lib/llm/providers/gemini/response_adapter/web_search.rb
|
|
264
|
-
- lib/llm/providers/gemini/stream_parser.rb
|
|
301
|
+
- lib/llm/providers/google.rb
|
|
302
|
+
- lib/llm/providers/google/audio.rb
|
|
303
|
+
- lib/llm/providers/google/error_handler.rb
|
|
304
|
+
- lib/llm/providers/google/files.rb
|
|
305
|
+
- lib/llm/providers/google/images.rb
|
|
306
|
+
- lib/llm/providers/google/models.rb
|
|
307
|
+
- lib/llm/providers/google/request_adapter.rb
|
|
308
|
+
- lib/llm/providers/google/request_adapter/completion.rb
|
|
309
|
+
- lib/llm/providers/google/response_adapter.rb
|
|
310
|
+
- lib/llm/providers/google/response_adapter/completion.rb
|
|
311
|
+
- lib/llm/providers/google/response_adapter/embedding.rb
|
|
312
|
+
- lib/llm/providers/google/response_adapter/file.rb
|
|
313
|
+
- lib/llm/providers/google/response_adapter/files.rb
|
|
314
|
+
- lib/llm/providers/google/response_adapter/image.rb
|
|
315
|
+
- lib/llm/providers/google/response_adapter/models.rb
|
|
316
|
+
- lib/llm/providers/google/response_adapter/web_search.rb
|
|
317
|
+
- lib/llm/providers/google/stream_parser.rb
|
|
265
318
|
- lib/llm/providers/llamacpp.rb
|
|
266
319
|
- lib/llm/providers/ollama.rb
|
|
267
320
|
- lib/llm/providers/ollama/error_handler.rb
|
|
@@ -271,6 +324,7 @@ files:
|
|
|
271
324
|
- lib/llm/providers/ollama/response_adapter.rb
|
|
272
325
|
- lib/llm/providers/ollama/response_adapter/completion.rb
|
|
273
326
|
- lib/llm/providers/ollama/response_adapter/embedding.rb
|
|
327
|
+
- lib/llm/providers/ollama/response_adapter/models.rb
|
|
274
328
|
- lib/llm/providers/ollama/stream_parser.rb
|
|
275
329
|
- lib/llm/providers/openai.rb
|
|
276
330
|
- lib/llm/providers/openai/audio.rb
|
|
@@ -290,6 +344,7 @@ files:
|
|
|
290
344
|
- lib/llm/providers/openai/response_adapter/enumerable.rb
|
|
291
345
|
- lib/llm/providers/openai/response_adapter/file.rb
|
|
292
346
|
- lib/llm/providers/openai/response_adapter/image.rb
|
|
347
|
+
- lib/llm/providers/openai/response_adapter/models.rb
|
|
293
348
|
- lib/llm/providers/openai/response_adapter/moderations.rb
|
|
294
349
|
- lib/llm/providers/openai/response_adapter/responds.rb
|
|
295
350
|
- lib/llm/providers/openai/response_adapter/web_search.rb
|
|
@@ -300,23 +355,26 @@ files:
|
|
|
300
355
|
- lib/llm/providers/xai.rb
|
|
301
356
|
- lib/llm/providers/xai/images.rb
|
|
302
357
|
- lib/llm/providers/zai.rb
|
|
358
|
+
- lib/llm/registry.rb
|
|
303
359
|
- lib/llm/response.rb
|
|
304
360
|
- lib/llm/schema.rb
|
|
305
361
|
- lib/llm/schema/array.rb
|
|
306
362
|
- lib/llm/schema/boolean.rb
|
|
363
|
+
- lib/llm/schema/enum.rb
|
|
307
364
|
- lib/llm/schema/integer.rb
|
|
308
365
|
- lib/llm/schema/leaf.rb
|
|
309
366
|
- lib/llm/schema/null.rb
|
|
310
367
|
- lib/llm/schema/number.rb
|
|
311
368
|
- lib/llm/schema/object.rb
|
|
369
|
+
- lib/llm/schema/parser.rb
|
|
312
370
|
- lib/llm/schema/string.rb
|
|
313
371
|
- lib/llm/schema/version.rb
|
|
314
372
|
- lib/llm/server_tool.rb
|
|
315
373
|
- lib/llm/session.rb
|
|
316
|
-
- lib/llm/session/deserializer.rb
|
|
317
374
|
- lib/llm/tool.rb
|
|
318
375
|
- lib/llm/tool/param.rb
|
|
319
376
|
- lib/llm/tracer.rb
|
|
377
|
+
- lib/llm/tracer/langsmith.rb
|
|
320
378
|
- lib/llm/tracer/logger.rb
|
|
321
379
|
- lib/llm/tracer/null.rb
|
|
322
380
|
- lib/llm/tracer/telemetry.rb
|
|
@@ -331,6 +389,7 @@ metadata:
|
|
|
331
389
|
homepage_uri: https://github.com/llmrb/llm.rb
|
|
332
390
|
source_code_uri: https://github.com/llmrb/llm.rb
|
|
333
391
|
documentation_uri: https://0x1eef.github.io/x/llm.rb
|
|
392
|
+
changelog_uri: https://0x1eef.github.io/x/llm.rb/file.CHANGELOG.html
|
|
334
393
|
rdoc_options: []
|
|
335
394
|
require_paths:
|
|
336
395
|
- lib
|
|
@@ -347,8 +406,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
|
347
406
|
requirements: []
|
|
348
407
|
rubygems_version: 3.6.9
|
|
349
408
|
specification_version: 4
|
|
350
|
-
summary: llm.rb is a
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
409
|
+
summary: llm.rb is a Ruby-centric toolkit for building real LLM-powered systems —
|
|
410
|
+
where LLMs are part of your architecture, not just API calls. It gives you explicit
|
|
411
|
+
control over contexts, tools, concurrency, and providers, so you can compose reliable,
|
|
412
|
+
production-ready workflows without hidden abstractions.
|
|
354
413
|
test_files: []
|
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
# frozen_string_literal: true
|
|
2
|
-
|
|
3
|
-
module LLM::Gemini::ResponseAdapter
|
|
4
|
-
module Models
|
|
5
|
-
include ::Enumerable
|
|
6
|
-
def each(&)
|
|
7
|
-
return enum_for(:each) unless block_given?
|
|
8
|
-
models.each { yield(_1) }
|
|
9
|
-
end
|
|
10
|
-
|
|
11
|
-
def models
|
|
12
|
-
body.models || []
|
|
13
|
-
end
|
|
14
|
-
end
|
|
15
|
-
end
|