llm.rb 5.0.0 → 5.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -0
- data/README.md +1 -1
- data/lib/llm/context.rb +4 -4
- data/lib/llm/providers/anthropic/stream_parser.rb +1 -1
- data/lib/llm/providers/google/stream_parser.rb +1 -1
- data/lib/llm/providers/openai/responses/stream_parser.rb +1 -1
- data/lib/llm/providers/openai/stream_parser.rb +1 -1
- data/lib/llm/stream.rb +34 -6
- data/lib/llm/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 56ddedb75f6c791cc42bca736bc62360ba4850a3a204f9a82288e8c6ea977eeb
+  data.tar.gz: 3881b731dacd921e258eac954c4468d052e673e48ad53c63ae1a246973c84d33
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a8838f57a1232afc42448d28a0f3f7b8907c2a527be284579b3af56e398edda50d7cc02a8dda2794c65096699831058494cccae3f8a116b538f76bc42127eba8
+  data.tar.gz: ef49e8046b4aab4e59b252ffdbf16135673d227b4124bf45bf5c31856c49822168182928b56fdc7428d2056dbae7c211fac0d6d8ef3eba48a7aca47420bb96e7
data/CHANGELOG.md
CHANGED
@@ -2,8 +2,29 @@
 
 ## Unreleased
 
+Changes since `v5.1.0`.
+
+## v5.1.0
+
 Changes since `v5.0.0`.
 
+This release tightens streamed tool execution around the actual request-local
+runtime state. It fixes streamed resolution of per-request tools and makes
+that streamed path work cleanly with `LLM.function(...)`, MCP tools, bound
+tool instances, and normal tool classes.
+
+### Fix
+
+* **Resolve request-local tools during streaming** <br>
+  Resolve streamed tool calls through `LLM::Stream` request-local tools
+  before falling back to the global registry, so per-request tools and bound
+  tool instances work correctly during streaming.
+
+* **Support `LLM.function(...)` and MCP tools in streamed tool resolution** <br>
+  Let streamed tool resolution use the current request tool set, so
+  `LLM.function(...)`, MCP tools, bound tool instances, and normal
+  `LLM::Tool` classes all work through the same streamed tool path.
+
 ## v5.0.0
 
 Changes since `v4.23.0`.
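To make the fix concrete, here is a minimal usage sketch of streaming with a request-local tool. It follows the `LLM.function(...)` DSL and tool-call loop documented for llm.rb; the provider, model, `LLM::Bot` interface, `stream:`/`tools:` options, and the `weather` function are illustrative assumptions, not code shipped in this release.

```ruby
require "llm"

# Hypothetical request-local tool built with the LLM.function DSL.
weather = LLM.function(:weather) do |fn|
  fn.description "Report the weather for a city"
  fn.params { |schema| schema.object(city: schema.string.required) }
  fn.define { |city:| {city: city, forecast: "sunny"} }
end

llm = LLM.openai(key: ENV["OPENAI_API_KEY"])
bot = LLM::Bot.new(llm, stream: $stdout, tools: [weather])

# With v5.1.0 the streamed tool call resolves against the request-local
# `tools:` above before the global function registry is consulted.
bot.chat "What is the weather in Lisbon?"
bot.chat bot.functions.map(&:call) # send the tool results back to the model
```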
data/README.md
CHANGED
@@ -4,7 +4,7 @@
 <p align="center">
   <a href="https://0x1eef.github.io/x/llm.rb?rebuild=1"><img src="https://img.shields.io/badge/docs-0x1eef.github.io-blue.svg" alt="RubyDoc"></a>
   <a href="https://opensource.org/license/0bsd"><img src="https://img.shields.io/badge/License-0BSD-orange.svg?" alt="License"></a>
-  <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-5.
+  <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-5.1.0-green.svg?" alt="Version"></a>
 </p>
 
 ## About
data/lib/llm/context.rb
CHANGED
@@ -177,7 +177,7 @@ module LLM
       params = params.merge(messages: @messages.to_a)
       params = @params.merge(params)
       prompt, params = transform(prompt, params)
-      bind!(params[:stream], params[:model])
+      bind!(params[:stream], params[:model], params[:tools])
       res = @llm.complete(prompt, params)
       role = params[:role] || @llm.user_role
       role = @llm.tool_role if params[:role].nil? && [*prompt].grep(LLM::Function::Return).any?
@@ -205,7 +205,7 @@ module LLM
       compactor.compact!(prompt) if compactor.compact?(prompt)
       params = @params.merge(params)
       prompt, params = transform(prompt, params)
-      bind!(params[:stream], params[:model])
+      bind!(params[:stream], params[:model], params[:tools])
       res_id = params[:store] == false ? nil : @messages.find(&:assistant?)&.response&.response_id
       params = params.merge(previous_response_id: res_id, input: @messages.to_a).compact
       res = @llm.responses.create(prompt, params)
@@ -459,11 +459,12 @@ module LLM
 
     private
 
-    def bind!(stream, model)
+    def bind!(stream, model, tools)
       return unless LLM::Stream === stream
       stream.extra[:ctx] = self
       stream.extra[:tracer] = tracer
       stream.extra[:model] = model
+      stream.extra[:tools] = tools
     end
 
     def queue
@@ -494,7 +495,6 @@ module LLM
         message: warning
       })
     end
-
   end
 
   # Backward-compatible alias
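The `bind!` change above is the hand-off point: whatever the caller passed as `params[:tools]` for this request now rides along on the stream's `extra` hash, where the provider stream parsers shown next can read it back. A self-contained stand-in of that flow, using a plain struct rather than `LLM::Stream` (names below are illustrative only):

```ruby
# Stand-in stream type; llm.rb's LLM::Stream carries an `extra` hash that
# plays the same role.
Stream = Struct.new(:extra)

def bind!(stream, model, tools)
  return unless stream.is_a?(Stream)
  stream.extra[:model] = model
  stream.extra[:tools] = tools # new in v5.1.0: request-local tools travel too
end

stream = Stream.new({})
bind!(stream, "gpt-4o-mini", [:weather_tool])
p stream.extra # => {:model=>"gpt-4o-mini", :tools=>[:weather_tool]}
```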
data/lib/llm/providers/anthropic/stream_parser.rb
CHANGED
@@ -105,7 +105,7 @@ class LLM::Anthropic
     end
 
     def resolve_tool(tool)
-      registered =
+      registered = @stream.find_tool(tool["name"])
       fn = (registered || LLM::Function.new(tool["name"])).dup.tap do |fn|
         fn.id = tool["id"]
         fn.arguments = LLM::Anthropic.parse_tool_input(tool["input"])
data/lib/llm/providers/google/stream_parser.rb
CHANGED
@@ -153,7 +153,7 @@ class LLM::Google
 
     def resolve_tool(part, cindex, pindex)
       call = part["functionCall"]
-      registered =
+      registered = @stream.find_tool(call["name"])
       fn = (registered || LLM::Function.new(call["name"])).dup.tap do |fn|
         fn.id = LLM::Google.tool_id(part:, cindex:, pindex:)
         fn.arguments = call["args"]
data/lib/llm/providers/openai/responses/stream_parser.rb
CHANGED
@@ -269,7 +269,7 @@ class LLM::OpenAI
     # @group Resolvers
 
     def resolve_tool(tool, arguments)
-      registered =
+      registered = @stream.find_tool(tool["name"])
       fn = (registered || LLM::Function.new(tool["name"])).dup.tap do |fn|
         fn.id = tool["call_id"]
         fn.arguments = arguments
data/lib/llm/providers/openai/stream_parser.rb
CHANGED
@@ -185,7 +185,7 @@ class LLM::OpenAI
     end
 
     def resolve_tool(tool, function, arguments)
-      registered =
+      registered = @stream.find_tool(function["name"])
       fn = (registered || LLM::Function.new(function["name"])).dup.tap do |fn|
         fn.id = tool["id"]
         fn.arguments = arguments
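All four provider parsers above now share the same shape: ask the stream for a request-local match first, and only build a bare placeholder `LLM::Function` when nothing matches, so an unresolved streamed tool can still be reported back to the model as an error return (see the `LLM::Stream` notes below). A self-contained stand-in of that shape, with plain structs in place of llm.rb classes:

```ruby
# Stand-in for LLM::Function; only the fields the resolvers touch.
Fn = Struct.new(:name, :id, :arguments)

# Mirrors the resolve_tool pattern shared by the four stream parsers:
# request-local lookup first, placeholder function when unknown.
def resolve_tool(request_tools, name, id, arguments)
  registered = request_tools[name]
  fn = (registered || Fn.new(name)).dup
  fn.id = id
  fn.arguments = arguments
  fn
end

tools = {"weather" => Fn.new("weather")}
p resolve_tool(tools, "weather", "call_1", {"city" => "Lisbon"}) # resolved
p resolve_tool(tools, "missing", "call_2", {})                   # placeholder
```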
data/lib/llm/stream.rb
CHANGED
@@ -83,12 +83,12 @@ module LLM
     # `tool.mcp? ? ctx.spawn(tool, :task) : ctx.spawn(tool, :ractor)`.
     # When a streamed tool cannot be resolved, `error` is passed as an
     # {LLM::Function::Return}. It can be sent back to the model, allowing
-    # the tool-call path to recover and the session to continue.
-    # resolution
-    # {LLM
-    #
-    #
-    # and does not support MCP tools.
+    # the tool-call path to recover and the session to continue. Streamed
+    # tool resolution now prefers the current request tools, so
+    # {LLM.function}, MCP tools, bound tool instances, and normal
+    # {LLM::Tool LLM::Tool} classes can all resolve through the same
+    # request-local path. The current `:ractor` mode is for class-based
+    # tools and does not support MCP tools.
     # @param [LLM::Function] tool
     #   The parsed tool call.
     # @param [LLM::Function::Return, nil] error
@@ -148,6 +148,34 @@ module LLM
       })
     end
 
+    ##
+    # Returns the tool definitions available for the current streamed request.
+    # This prefers request-local tools attached to the stream and falls back
+    # to the current context defaults when present.
+    # @return [Array<LLM::Function, LLM::Tool>]
+    def tools
+      extra[:tools] || ctx&.params&.dig(:tools) || []
+    end
+
+    ##
+    # Resolves a streamed tool call against the current request tools first,
+    # then falls back to the global function registry.
+    # @param [String] name
+    # @return [LLM::Function, nil]
+    def find_tool(name)
+      tool = tools.find do |candidate|
+        candidate_name =
+          if candidate.respond_to?(:function)
+            candidate.function.name
+          else
+            candidate.name
+          end
+        candidate_name.to_s == name.to_s
+      end
+      tool&.then { _1.respond_to?(:function) ? _1.function : _1 } ||
+        LLM::Function.find_by_name(name)
+    end
+
     # @endgroup
   end
 end
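The new `find_tool` above encodes the resolution order the changelog describes: request-local tools win, a bound tool instance that wraps a function (anything responding to `#function`) is unwrapped, and only then does resolution fall back to the global registry behind `LLM::Function.find_by_name`. A self-contained sketch of that ordering, with stand-in structs instead of llm.rb classes:

```ruby
# Stand-ins for illustration; in llm.rb these are LLM::Function, bound tool
# instances, and the registry behind LLM::Function.find_by_name.
Fn    = Struct.new(:name)
Bound = Struct.new(:function) # a bound tool instance wrapping a function

GLOBAL_REGISTRY = {"time" => Fn.new("time")}

def find_tool(name, request_tools)
  tool = request_tools.find do |candidate|
    candidate_name =
      candidate.respond_to?(:function) ? candidate.function.name : candidate.name
    candidate_name.to_s == name.to_s
  end
  # Unwrap bound instances so callers always get a function-like object,
  # then fall back to the global registry.
  tool&.then { _1.respond_to?(:function) ? _1.function : _1 } ||
    GLOBAL_REGISTRY[name]
end

request_tools = [Bound.new(Fn.new("weather")), Fn.new("search")]
p find_tool("weather", request_tools) # => the unwrapped Fn("weather")
p find_tool("time", request_tools)    # => falls back to the global registry
```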
data/lib/llm/version.rb
CHANGED