llm.rb 5.3.0 → 5.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 39e1632fb63f83a65c5a146ea2a2f4178d0d99d26d2a347f36d360c09ea9845d
4
- data.tar.gz: 04b2236d5cac243cc496b686d8d8a5097676e7bcd6973bacfa8d1f7e8d48e270
3
+ metadata.gz: 0bd3ea0956fe1a9fa53bec3211dc4afe6f03c15fa67304ce5ba2c922d20abff1
4
+ data.tar.gz: 1aa03e4fc3eafbbbf9367deb8714f844ccd41299c666595d1d081a2db4d9d42e
5
5
  SHA512:
6
- metadata.gz: 1b4a68bd3b3e109a00f996f520296405ad6066b4f17c1e59da4077c2023c5fe0c95e770b9bd563a531748a16d79355f8db5fb2dcd66ac42673698ddcbea07704
7
- data.tar.gz: 12713c07834164f3d13d01613126488cc19e937b8f127fcdbf839d8c75f2f6b77c6207e7e3e6f515d996e35aedb6d6e7333ba563a3f4578f4c33f454d41c6088
6
+ metadata.gz: 0e14b7cb29b5130b703c26369b6ecec106117e6a045bbcbeb79019c96814d9969e387c25992d2f3c96fd2ea43143ca4c48f00e91a00cc6cb3e20556145254d80
7
+ data.tar.gz: 3d34913dba2eab22f6f794196d59791cbadfeb71b9b4aaf588d46607112c4a0a0f741a9e8c9d308afb86d5d678a753b8a551ca66b08351581677845012c8e583
data/CHANGELOG.md CHANGED
@@ -2,8 +2,34 @@
2
2
 
3
3
  ## Unreleased
4
4
 
5
+ Changes since `v5.4.0`.
6
+
7
+ ## v5.4.0
8
+
5
9
  Changes since `v5.3.0`.
6
10
 
11
+ This release expands tracer support around agentic execution. It lets
12
+ `LLM::Agent` define scoped tracers through the agent DSL and fixes concurrent
13
+ tool execution so those scoped tracers stay attached when work crosses
14
+ thread, task, fiber, and skill boundaries.
15
+
16
+ ### Change
17
+
18
+ * **Add agent-scoped tracers** <br>
19
+ Let `LLM::Agent` classes define `tracer ...` or `tracer { ... }` so an
20
+ agent can carry its own tracer without replacing the provider's default
21
+ tracer. The resolved tracer is scoped to that agent's turns, tool loops,
22
+ and pending tool access. Available through the `acts_as_agent` and Sequel
23
+ agent plugin `tracer` DSL too.
24
+
25
+ ### Fix
26
+
27
+ * **Preserve scoped tracers across concurrent tool work** <br>
28
+ Keep agent- and request-scoped tracers attached when tool execution
29
+ crosses `:thread`, `:task`, or `:fiber` boundaries, including skill
30
+ execution, so spawned work does not fall back to the provider default
31
+ tracer.
32
+
7
33
  ## v5.3.0
8
34
 
9
35
  Changes since `v5.2.1`.
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  <p align="center">
5
5
  <a href="https://0x1eef.github.io/x/llm.rb?rebuild=1"><img src="https://img.shields.io/badge/docs-0x1eef.github.io-blue.svg" alt="RubyDoc"></a>
6
6
  <a href="https://opensource.org/license/0bsd"><img src="https://img.shields.io/badge/License-0BSD-orange.svg?" alt="License"></a>
7
- <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-5.3.0-green.svg?" alt="Version"></a>
7
+ <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-5.4.0-green.svg?" alt="Version"></a>
8
8
  </p>
9
9
 
10
10
  ## About
@@ -87,6 +87,7 @@ Review the release state, summarize what changed, and prepare the release.
87
87
  class Agent < LLM::Agent
88
88
  model "gpt-5.4-mini"
89
89
  skills "./skills/release"
90
+ tracer { LLM::Tracer::Logger.new(llm, path: "logs/release-agent.log") }
90
91
  end
91
92
 
92
93
  llm = LLM.openai(key: ENV["KEY"])
@@ -566,6 +567,7 @@ class Agent < LLM::Agent
566
567
  model "gpt-5.4-mini"
567
568
  instructions "You are a concise release assistant."
568
569
  skills "./skills/release", "./skills/review"
570
+ tracer { LLM::Tracer::Logger.new(llm, path: "logs/release-agent.log") }
569
571
  end
570
572
 
571
573
  llm = LLM.openai(key: ENV["KEY"])
[NOTE(review): extraction artifact — the hunk below is Ruby plugin source (`module LLM::ActiveRecord`, lines ~41-48), not README.md content; a "data/lib/... CHANGED" file header for the ActiveRecord agent plugin appears to have been dropped here. Verify against the published gem diff.]
@@ -41,6 +41,11 @@ module LLM::ActiveRecord
41
41
  agent.concurrency(concurrency)
42
42
  end
43
43
 
44
+ def tracer(tracer = nil, &block)
45
+ return agent.tracer if tracer.nil? && !block
46
+ agent.tracer(tracer, &block)
47
+ end
48
+
44
49
  def agent
45
50
  @agent ||= Class.new(LLM::Agent)
46
51
  end
data/lib/llm/agent.rb CHANGED
@@ -115,6 +115,26 @@ module LLM
115
115
  @concurrency = concurrency
116
116
  end
117
117
 
118
+ ##
119
+ # Set or get the default tracer.
120
+ #
121
+ # When a block is provided, it is stored and evaluated lazily against the
122
+ # agent instance during initialization so it can build a tracer from the
123
+ # resolved provider.
124
+ #
125
+ # @example
126
+ # class Agent < LLM::Agent
127
+ # tracer { LLM::Tracer::Logger.new(llm, io: $stdout) }
128
+ # end
129
+ #
130
+ # @param [LLM::Tracer, Proc, nil] tracer
131
+ # @yieldreturn [LLM::Tracer, nil]
132
+ # @return [LLM::Tracer, Proc, nil]
133
+ def self.tracer(tracer = nil, &block)
134
+ return @tracer if tracer.nil? && !block
135
+ @tracer = block || tracer
136
+ end
137
+
118
138
  ##
119
139
  # @param [LLM::Provider] provider
120
140
  # A provider
@@ -131,6 +151,7 @@ module LLM
131
151
  defaults = {model: self.class.model, tools: self.class.tools, skills: self.class.skills, schema: self.class.schema}.compact
132
152
  @concurrency = params.delete(:concurrency) || self.class.concurrency
133
153
  @llm = llm
154
+ @tracer = resolve_option(self.class.tracer) unless self.class.tracer.nil?
134
155
  @ctx = LLM::Context.new(llm, defaults.merge({guard: true}).merge(params))
135
156
  end
136
157
 
@@ -179,7 +200,7 @@ module LLM
179
200
  ##
180
201
  # @return [Array<LLM::Function>]
181
202
  def functions
182
- @ctx.functions
203
+ @tracer ? @llm.with_tracer(@tracer) { @ctx.functions } : @ctx.functions
183
204
  end
184
205
 
185
206
  ##
@@ -193,14 +214,14 @@ module LLM
193
214
  # @see LLM::Context#call
194
215
  # @return [Object]
195
216
  def call(...)
196
- @ctx.call(...)
217
+ @tracer ? @llm.with_tracer(@tracer) { @ctx.call(...) } : @ctx.call(...)
197
218
  end
198
219
 
199
220
  ##
200
221
  # @see LLM::Context#wait
201
222
  # @return [Array<LLM::Function::Return>]
202
223
  def wait(...)
203
- @ctx.wait(...)
224
+ @tracer ? @llm.with_tracer(@tracer) { @ctx.wait(...) } : @ctx.wait(...)
204
225
  end
205
226
 
206
227
  ##
@@ -257,7 +278,7 @@ module LLM
257
278
  # @return [LLM::Tracer]
258
279
  # Returns an LLM tracer
259
280
  def tracer
260
- @ctx.tracer
281
+ @tracer || @ctx.tracer
261
282
  end
262
283
 
263
284
  ##
@@ -371,14 +392,21 @@ module LLM
371
392
  end
372
393
 
373
394
  def run_loop(method, prompt, params)
374
- max = Integer(params.delete(:tool_attempts) || 25)
375
- res = @ctx.public_send(method, apply_instructions(prompt), params)
376
- max.times do
377
- break if @ctx.functions.empty?
378
- res = @ctx.public_send(method, call_functions, params)
395
+ loop = proc do
396
+ max = Integer(params.delete(:tool_attempts) || 25)
397
+ res = @ctx.public_send(method, apply_instructions(prompt), params)
398
+ max.times do
399
+ break if @ctx.functions.empty?
400
+ res = @ctx.public_send(method, call_functions, params)
401
+ end
402
+ raise LLM::ToolLoopError, "pending tool calls remain" unless @ctx.functions.empty?
403
+ res
379
404
  end
380
- raise LLM::ToolLoopError, "pending tool calls remain" unless @ctx.functions.empty?
381
- res
405
+ @tracer ? @llm.with_tracer(@tracer, &loop) : loop.call
406
+ end
407
+
408
+ def resolve_option(option)
409
+ Proc === option ? instance_exec(&option) : option
382
410
  end
383
411
  end
384
412
  end
data/lib/llm/function.rb CHANGED
@@ -218,12 +218,12 @@ class LLM::Function
218
218
  task = case strategy
219
219
  when :task
220
220
  require "async" unless defined?(::Async)
221
- Async { call }
221
+ Async { call! }
222
222
  when :thread
223
- Thread.new { call }
223
+ Thread.new { call! }
224
224
  when :fiber
225
225
  Fiber.new do
226
- call
226
+ call!
227
227
  ensure
228
228
  Fiber.yield
229
229
  end.tap(&:resume)
@@ -328,9 +328,16 @@ class LLM::Function
328
328
  # Returns a Return object with either the function result or error information.
329
329
  def call_function
330
330
  runner = ((Class === @runner) ? @runner.new : @runner)
331
+ runner.tracer = @tracer if runner.respond_to?(:tracer=)
331
332
  kwargs = Hash === arguments ? arguments.transform_keys(&:to_sym) : arguments
332
333
  Return.new(id, name, runner.call(**kwargs))
333
334
  rescue => ex
334
335
  Return.new(id, name, {error: true, type: ex.class.name, message: ex.message})
335
336
  end
337
+
338
+ def call!
339
+ llm = @tracer&.llm
340
+ return call unless llm.respond_to?(:with_tracer)
341
+ llm.with_tracer(@tracer) { call }
342
+ end
336
343
  end
[NOTE(review): extraction artifact — the hunk below is `module LLM::Sequel` plugin source, not part of data/lib/llm/function.rb; a "data/lib/... CHANGED" file header for the Sequel agent plugin appears to have been dropped here. Verify against the published gem diff.]
@@ -62,6 +62,11 @@ module LLM::Sequel
62
62
  agent.concurrency(concurrency)
63
63
  end
64
64
 
65
+ def tracer(tracer = nil, &block)
66
+ return agent.tracer if tracer.nil? && !block
67
+ agent.tracer(tracer, &block)
68
+ end
69
+
65
70
  def agent
66
71
  @agent ||= Class.new(LLM::Agent)
67
72
  end
data/lib/llm/skill.rb CHANGED
@@ -74,11 +74,12 @@ module LLM
74
74
  # @param [LLM::Context] ctx
75
75
  # @return [Hash]
76
76
  def call(ctx)
77
- instructions, tools = self.instructions, self.tools
77
+ instructions, tools, tracer = self.instructions, self.tools, ctx.llm.tracer
78
78
  params = ctx.params.merge(mode: ctx.mode).reject { [:tools, :schema].include?(_1) }
79
79
  agent = Class.new(LLM::Agent) do
80
80
  instructions(instructions)
81
81
  tools(*tools)
82
+ tracer(tracer)
82
83
  end.new(ctx.llm, params)
83
84
  agent.messages.concat(messages_for(ctx))
84
85
  res = agent.talk("Solve the user's query.")
@@ -95,6 +96,7 @@ module LLM
95
96
  Class.new(LLM::Tool) do
96
97
  name skill.name
97
98
  description skill.description
99
+ attr_accessor :tracer
98
100
 
99
101
  define_method(:call) do
100
102
  skill.call(ctx)
[NOTE(review): extraction artifact — the two `finish_attributes` hunks below (`@provider` → `@llm`) match the instance-variable rename made in data/lib/llm/tracer.rb and presumably belong to tracer implementation file(s), not data/lib/llm/skill.rb; the intervening file header(s) appear to be missing. Verify against the published gem diff.]
@@ -114,7 +114,7 @@ module LLM
114
114
  # @param [LLM::Response] res
115
115
  # @api private
116
116
  def finish_attributes(operation, res)
117
- case @provider.class.to_s
117
+ case @llm.class.to_s
118
118
  when "LLM::OpenAI" then openai_attributes(operation, res)
119
119
  else {}
120
120
  end
@@ -233,7 +233,7 @@ module LLM
233
233
  # @param [LLM::Response] res
234
234
  # @api private
235
235
  def finish_attributes(operation, res)
236
- case @provider.class.to_s
236
+ case @llm.class.to_s
237
237
  when "LLM::OpenAI" then openai_attributes(operation, res)
238
238
  else {}
239
239
  end
data/lib/llm/tracer.rb CHANGED
@@ -14,13 +14,17 @@ module LLM
14
14
  require_relative "tracer/langsmith"
15
15
  require_relative "tracer/null"
16
16
 
17
+ ##
18
+ # @return [LLM::Provider]
19
+ attr_reader :llm
20
+
17
21
  ##
18
22
  # @param [LLM::Provider] provider
19
23
  # A provider
20
24
  # @param [Hash] options
21
25
  # A hash of options
22
26
  def initialize(provider, options = {})
23
- @provider = provider
27
+ @llm = provider
24
28
  @options = {}
25
29
  end
26
30
 
@@ -124,7 +128,7 @@ module LLM
124
128
  ##
125
129
  # @return [String]
126
130
  def inspect
127
- "#<#{self.class.name}:0x#{object_id.to_s(16)} @provider=#{@provider.class} @tracer=#{@tracer.inspect}>"
131
+ "#<#{self.class.name}:0x#{object_id.to_s(16)} @provider=#{@llm.class} @tracer=#{@tracer.inspect}>"
128
132
  end
129
133
 
130
134
  ##
@@ -245,19 +249,19 @@ module LLM
245
249
  ##
246
250
  # @return [String]
247
251
  def provider_name
248
- @provider.class.name.split("::").last.downcase
252
+ @llm.class.name.split("::").last.downcase
249
253
  end
250
254
 
251
255
  ##
252
256
  # @return [String]
253
257
  def provider_host
254
- @provider.instance_variable_get(:@host)
258
+ @llm.instance_variable_get(:@host)
255
259
  end
256
260
 
257
261
  ##
258
262
  # @return [String]
259
263
  def provider_port
260
- @provider.instance_variable_get(:@port)
264
+ @llm.instance_variable_get(:@port)
261
265
  end
262
266
  end
263
267
  end
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "5.3.0"
4
+ VERSION = "5.4.0"
5
5
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm.rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 5.3.0
4
+ version: 5.4.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri