llm.rb 6.1.0 → 8.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,7 +26,8 @@ class LLM::Function
26
26
  # Controls concurrency strategy:
27
27
  # - `:thread`: Use threads
28
28
  # - `:task`: Use async tasks (requires async gem)
29
- # - `:fiber`: Use raw fibers
29
+ # - `:fiber`: Use scheduler-backed fibers (requires Fiber.scheduler)
30
+ # - `:fork`: Use forked child processes
30
31
  # - `:ractor`: Use Ruby ractors (class-based tools only; MCP tools are not supported)
31
32
  #
32
33
  # @return [LLM::Function::ThreadGroup, LLM::Function::TaskGroup, LLM::Function::FiberGroup, LLM::Function::Ractor::Group]
@@ -38,10 +39,12 @@ class LLM::Function
38
39
  ThreadGroup.new(map { |fn| fn.spawn(:thread) })
39
40
  when :fiber
40
41
  FiberGroup.new(map { |fn| fn.spawn(:fiber) })
42
+ when :fork
43
+ Fork::Group.new(map { |fn| fn.spawn(:fork) })
41
44
  when :ractor
42
45
  Ractor::Group.new(map { |fn| fn.spawn(:ractor) })
43
46
  else
44
- raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, or :ractor"
47
+ raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, :fork, or :ractor"
45
48
  end
46
49
  end
47
50
 
@@ -53,7 +56,8 @@ class LLM::Function
53
56
  # Controls concurrency strategy:
54
57
  # - `:thread`: Use threads
55
58
  # - `:task`: Use async tasks (requires async gem)
56
- # - `:fiber`: Use raw fibers
59
+ # - `:fiber`: Use scheduler-backed fibers (requires Fiber.scheduler)
60
+ # - `:fork`: Use forked child processes
57
61
  # - `:ractor`: Use Ruby ractors (class-based tools only; MCP tools are not supported)
58
62
  #
59
63
  # @return [Array<LLM::Function::Return>]
@@ -4,10 +4,10 @@ class LLM::Function
4
4
  ##
5
5
  # The {LLM::Function::FiberGroup} class wraps an array of
6
6
  # {Fiber} objects that are running {LLM::Function} calls
7
- # concurrently using raw fibers.
7
+ # concurrently using scheduler-backed fibers.
8
8
  #
9
9
  # This class provides the same interface as {LLM::Function::ThreadGroup}
10
- # but uses raw fibers for lightweight concurrency without the async gem.
10
+ # but uses scheduler-backed fibers for cooperative concurrency.
11
11
  #
12
12
  # @example
13
13
  # llm = LLM.openai(key: ENV["KEY"])
@@ -90,10 +90,16 @@ class LLM::Function
90
90
  # order as the original fibers.
91
91
  def wait
92
92
  @fibers.map do |fiber|
93
- fiber.resume if fiber.alive?
93
+ fiber.alive? ? scheduler.run : nil
94
94
  fiber.value
95
95
  end
96
96
  end
97
97
  alias_method :value, :wait
98
+
99
+ private
100
+
101
+ def scheduler
102
+ Fiber.scheduler
103
+ end
98
104
  end
99
105
  end
@@ -0,0 +1,67 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Job} class represents a single fork-backed
6
+ # function call inside the child process.
7
+ #
8
+ # It is executed in the forked process and is responsible for running the
9
+ # resolved tool instance, handling control messages such as interrupts, and
10
+ # writing the final result back to the parent process.
11
+ class Fork::Job
12
+ ##
13
+ # @param [LLM::Function] function
14
+ # @param [LLM::Object] ch
15
+ # @return [LLM::Function::Fork::Job]
16
+ def initialize(function, ch)
17
+ @function = function
18
+ @ch = ch
19
+ end
20
+
21
+ ##
22
+ # @return [void]
23
+ def call
24
+ runner = @function.runner
25
+ controller = setup(runner)
26
+ @ch.result.write([:result, call!(runner)])
27
+ rescue => ex
28
+ @ch.result.write([:result, error(ex)])
29
+ ensure
30
+ controller&.kill
31
+ [@ch.control, @ch.result].each { _1.close unless _1.closed? }
32
+ end
33
+
34
+ private
35
+
36
+ def call!(runner)
37
+ kwargs = if Hash === @function.arguments
38
+ @function.arguments.transform_keys(&:to_sym)
39
+ else
40
+ @function.arguments
41
+ end
42
+ {id: @function.id, name: @function.name, value: runner.call(**kwargs)}
43
+ end
44
+
45
+ def error(ex)
46
+ {
47
+ id: @function.id,
48
+ name: @function.name,
49
+ value: {error: true, type: ex.class.name, message: ex.message}
50
+ }
51
+ end
52
+
53
+ def setup(runner)
54
+ ready = Queue.new
55
+ thread = Thread.new do
56
+ ready << true
57
+ kind = @ch.control.recv
58
+ next unless kind == :interrupt
59
+ hook = %i[on_cancel on_interrupt].find { runner.respond_to?(_1) }
60
+ runner.public_send(hook) if hook
61
+ rescue IOError, ArgumentError
62
+ end
63
+ ready.pop
64
+ thread
65
+ end
66
+ end
67
+ end
@@ -0,0 +1,76 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Task} class wraps a fork-backed function call
6
+ # and exchanges control and result messages with the child process.
7
+ class Fork::Task
8
+ ##
9
+ # @param [LLM::Function] function
10
+ # @param [LLM::Tracer, nil] tracer
11
+ # @param [Object, nil] span
12
+ # @return [LLM::Function::Fork::Task]
13
+ def initialize(function, tracer: nil, span: nil)
14
+ @function = function
15
+ @tracer = tracer
16
+ @span = span
17
+ @waited = false
18
+ end
19
+
20
+ ##
21
+ # @return [LLM::Function::Fork::Task]
22
+ def spawn
23
+ @ch = LLM::Object.from(control: xchan(:marshal), result: xchan(:marshal))
24
+ @pid = Kernel.fork { Fork::Job.new(@function, @ch).call }
25
+ self
26
+ end
27
+
28
+ ##
29
+ # @return [Boolean]
30
+ def alive?
31
+ return false if @waited
32
+ result = ::Process.waitpid(@pid, ::Process::WNOHANG)
33
+ @waited = !result.nil?
34
+ !@waited
35
+ rescue Errno::ECHILD
36
+ @waited = true
37
+ false
38
+ end
39
+
40
+ ##
41
+ # @return [nil]
42
+ def interrupt!
43
+ return nil if @waited
44
+ @ch.control.write(:interrupt)
45
+ nil
46
+ rescue Errno::ESRCH, IOError
47
+ nil
48
+ end
49
+ alias_method :cancel!, :interrupt!
50
+
51
+ ##
52
+ # @return [LLM::Function::Return]
53
+ def wait
54
+ kind, data = @ch.result.recv
55
+ raise ArgumentError, "Unknown fork message: #{kind.inspect}" unless kind == :result
56
+ result = Return.new(data[:id], data[:name], data[:value])
57
+ reap
58
+ @tracer&.on_tool_finish(result:, span: @span)
59
+ result
60
+ ensure
61
+ reap
62
+ [@ch.control, @ch.result].each { _1.close unless _1.closed? }
63
+ end
64
+ alias_method :value, :wait
65
+
66
+ private
67
+
68
+ def reap
69
+ return if @waited
70
+ ::Process.waitpid(@pid)
71
+ @waited = true
72
+ rescue Errno::ECHILD
73
+ @waited = true
74
+ end
75
+ end
76
+ end
@@ -0,0 +1,8 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ module Fork
5
+ require_relative "fork/job"
6
+ require_relative "fork/task"
7
+ end
8
+ end
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Group} class wraps an array of
6
+ # {LLM::Function::Fork::Task} objects that are running in forked child processes.
7
+ class Fork::Group
8
+ ##
9
+ # @param [Array<LLM::Function::Fork::Task>] tasks
10
+ # @return [LLM::Function::Fork::Group]
11
+ def initialize(tasks)
12
+ @tasks = tasks
13
+ end
14
+
15
+ ##
16
+ # @return [Boolean]
17
+ def alive?
18
+ @tasks.any?(&:alive?)
19
+ end
20
+
21
+ ##
22
+ # @return [nil]
23
+ def interrupt!
24
+ @tasks.each(&:interrupt!)
25
+ nil
26
+ end
27
+ alias_method :cancel!, :interrupt!
28
+
29
+ ##
30
+ # @return [Array<LLM::Function::Return>]
31
+ def wait
32
+ @tasks.map(&:wait)
33
+ end
34
+ alias_method :value, :wait
35
+ end
36
+ end
@@ -19,9 +19,19 @@ class LLM::Function
19
19
  # @param [Object, nil] span
20
20
  # @return [LLM::Function::Ractor::Task]
21
21
  def initialize(runner_class, id, name, arguments, tracer: nil, span: nil)
22
+ @runner_class = runner_class
23
+ @id = id
24
+ @name = name
25
+ @arguments = arguments
22
26
  @tracer = tracer
23
27
  @span = span
24
- @mailbox = Ractor::Mailbox.new(build_task(runner_class, id, name, arguments))
28
+ end
29
+
30
+ ##
31
+ # @return [LLM::Function::Ractor::Task]
32
+ def spawn
33
+ @mailbox = Ractor::Mailbox.new(build_task)
34
+ self
25
35
  end
26
36
 
27
37
  ##
@@ -49,8 +59,8 @@ class LLM::Function
49
59
 
50
60
  private
51
61
 
52
- def build_task(runner_class, id, name, arguments)
53
- ::Ractor.new(runner_class, id, name, arguments) do |runner_class, id, name, arguments|
62
+ def build_task
63
+ ::Ractor.new(@runner_class, @id, @name, @arguments) do |runner_class, id, name, arguments|
54
64
  LLM::Function::Ractor::Job.new(::Ractor.current, runner_class, id, name, arguments).call
55
65
  end
56
66
  end
@@ -3,7 +3,8 @@
3
3
  class LLM::Function
4
4
  ##
5
5
  # The {LLM::Function::Task} class wraps a single concurrent function call and
6
- # provides a small, uniform interface across threads, fibers, and async tasks.
6
+ # provides a small, uniform interface across threads, scheduler-backed fibers,
7
+ # and async tasks.
7
8
  class Task
8
9
  ##
9
10
  # @return [Object]
@@ -32,6 +33,7 @@ class LLM::Function
32
33
  ##
33
34
  # @return [nil]
34
35
  def interrupt!
36
+ task.interrupt! if task.respond_to?(:interrupt!)
35
37
  function&.interrupt!
36
38
  nil
37
39
  end
@@ -43,12 +45,18 @@ class LLM::Function
43
45
  if Thread === task
44
46
  task.value
45
47
  elsif Fiber === task
46
- task.resume if task.alive?
48
+ task.alive? ? scheduler.run : nil
47
49
  task.value
48
50
  else
49
51
  task.wait
50
52
  end
51
53
  end
52
54
  alias_method :value, :wait
55
+
56
+ private
57
+
58
+ def scheduler
59
+ Fiber.scheduler
60
+ end
53
61
  end
54
62
  end
data/lib/llm/function.rb CHANGED
@@ -36,6 +36,8 @@ class LLM::Function
36
36
  require_relative "function/thread_group"
37
37
  require_relative "function/fiber_group"
38
38
  require_relative "function/task_group"
39
+ require_relative "function/fork"
40
+ require_relative "function/fork_group"
39
41
  require_relative "function/ractor"
40
42
  require_relative "function/ractor_group"
41
43
 
@@ -209,7 +211,9 @@ class LLM::Function
209
211
  # Controls concurrency strategy:
210
212
  # - `:thread`: Use threads
211
213
  # - `:task`: Use async tasks (requires async gem)
212
- # - `:fiber`: Use raw fibers
214
+ # - `:fiber`: Use scheduler-backed fibers (requires Fiber.scheduler)
215
+ # - `:fork`: Use a forked child process (requires xchan.rb support)
213
217
  # - `:ractor`: Use Ruby ractors (class-based tools only; MCP tools are not supported)
214
218
  #
215
219
  # @return [LLM::Function::Task]
@@ -217,25 +221,26 @@ class LLM::Function
217
221
  def spawn(strategy)
218
222
  task = case strategy
219
223
  when :task
220
- require "async" unless defined?(::Async)
224
+ LLM.require "async" unless defined?(::Async)
221
225
  Async { call! }
222
226
  when :thread
223
227
  Thread.new { call! }
224
228
  when :fiber
225
- Fiber.new do
226
- call!
227
- ensure
228
- Fiber.yield
229
- end.tap(&:resume)
229
+ raise ArgumentError, "Fiber concurrency requires Fiber.scheduler" unless Fiber.scheduler
230
+ Fiber.schedule { call! }
231
+ when :fork
232
+ LLM.require "xchan" unless defined?(::Chan::UNIXSocket)
233
+ span = @tracer&.on_tool_start(id:, name:, arguments:, model:)
234
+ Fork::Task.new(self, tracer: @tracer, span:).spawn
230
235
  when :ractor
231
236
  raise LLM::RactorError, "Ractor concurrency only supports class-based tools" unless Class === @runner
232
237
  if @runner.respond_to?(:skill?) && @runner.skill?
233
238
  raise LLM::RactorError, "Ractor concurrency does not support skill-backed tools"
234
239
  end
235
240
  span = @tracer&.on_tool_start(id:, name:, arguments:, model:)
236
- Ractor::Task.new(@runner, id, name, arguments, tracer: @tracer, span:)
241
+ Ractor::Task.new(@runner, id, name, arguments, tracer: @tracer, span:).spawn
237
242
  else
238
- raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, or :ractor"
243
+ raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, :fork, or :ractor"
239
244
  end
240
245
  Task.new(task, self)
241
246
  ensure
@@ -306,6 +311,15 @@ class LLM::Function
306
311
  end
307
312
  end
308
313
 
314
+ ##
315
+ # Returns the bound function runner instance.
316
+ # @return [Object]
317
+ def runner
318
+ runner = Class === @runner ? @runner.new : @runner
319
+ runner.tracer = @tracer if runner.respond_to?(:tracer=)
320
+ runner
321
+ end
322
+
309
323
  private
310
324
 
311
325
  def format_openai(provider)
@@ -331,8 +345,7 @@ class LLM::Function
331
345
  # @return [LLM::Function::Return]
332
346
  # Returns a Return object with either the function result or error information.
333
347
  def call_function
334
- runner = ((Class === @runner) ? @runner.new : @runner)
335
- runner.tracer = @tracer if runner.respond_to?(:tracer=)
348
+ runner = self.runner
336
349
  kwargs = Hash === arguments ? arguments.transform_keys(&:to_sym) : arguments
337
350
  Return.new(id, name, runner.call(**kwargs))
338
351
  rescue => ex
@@ -10,8 +10,7 @@
10
10
  #
11
11
  # {LLM::LoopGuard LLM::LoopGuard} detects when a context is repeating the same
12
12
  # tool-call pattern instead of making progress. It is directly inspired by
13
- # General Intelligence Systems' Brute runtime and its doom-loop detection
14
- # approach.
13
+ # General Intelligence Systems and its doom-loop detection approach.
15
14
  #
16
15
  # The public interface is intentionally small:
17
16
  # - `call(ctx)` returns `nil` when no intervention is needed
@@ -22,14 +21,6 @@
22
21
  # {LLM::Agent LLM::Agent} enables this guard by default through its wrapped
23
22
  # context.
24
23
  #
25
- # Brute is MIT licensed. The relevant license grant is:
26
- #
27
- # Permission is hereby granted, free of charge, to any person obtaining a copy
28
- # of this software and associated documentation files (the "Software"), to deal
29
- # in the Software without restriction, including without limitation the rights
30
- # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
31
- # copies of the Software, and to permit persons to whom the Software is
32
- # furnished to do so.
33
24
  class LLM::LoopGuard
34
25
  ##
35
26
  # The default number of repeated tool-call patterns required before
@@ -34,7 +34,7 @@ class LLM::MCP
34
34
  # @return [void]
35
35
  def start
36
36
  raise LLM::MCP::Error, "MCP command is already running" if alive?
37
- @stdout, @stderr, @stdin = 3.times.map { Pipe.new }
37
+ @stdout, @stderr, @stdin = 3.times.map { LLM::Pipe.new }
38
38
  @buffers.clear
39
39
  @pid = Process.spawn(env.to_h, *argv, {chdir: cwd, out: stdout.w, err: stderr.w, in: stdin.r}.compact)
40
40
  [stdin.close_reader, [stdout, stderr].each(&:close_writer)]
@@ -104,12 +104,12 @@ module LLM::MCP::Transport
104
104
  # Configures the transport to use a persistent HTTP connection pool
105
105
  # via the optional dependency [Net::HTTP::Persistent](https://github.com/drbrain/net-http-persistent)
106
106
  # @example
107
- # mcp = LLM.mcp(http: {url: "https://example.com/mcp"}).persistent
107
+ # mcp = LLM::MCP.http(url: "https://example.com/mcp", persistent: true)
108
108
  # # do something with 'mcp'
109
109
  # @return [LLM::MCP::Transport::HTTP]
110
110
  def persist!
111
111
  LLM.lock(:mcp) do
112
- require "net/http/persistent" unless defined?(Net::HTTP::Persistent)
112
+ LLM.require "net/http/persistent" unless defined?(Net::HTTP::Persistent)
113
113
  unless LLM::MCP.clients.key?(key)
114
114
  http = Net::HTTP::Persistent.new(name: self.class.name)
115
115
  http.read_timeout = timeout
data/lib/llm/mcp.rb CHANGED
@@ -19,17 +19,18 @@ class LLM::MCP
19
19
  require_relative "mcp/mailbox"
20
20
  require_relative "mcp/router"
21
21
  require_relative "mcp/rpc"
22
- require_relative "mcp/pipe"
23
22
  require_relative "mcp/transport/http"
24
23
  require_relative "mcp/transport/stdio"
25
24
 
26
25
  include RPC
27
26
 
28
- @@clients = {}
27
+ @clients = {}
29
28
 
30
29
  ##
31
30
  # @api private
32
- def self.clients = @@clients
31
+ def self.clients
32
+ @clients
33
+ end
33
34
 
34
35
  ##
35
36
  # Builds an MCP client that uses the stdio transport.
@@ -80,7 +81,9 @@ class LLM::MCP
80
81
  @command = Command.new(**stdio)
81
82
  @transport = Transport::Stdio.new(command:)
82
83
  elsif http
84
+ persistent = http.delete(:persistent)
83
85
  @transport = Transport::HTTP.new(**http, timeout:)
86
+ @transport.persist! if persistent
84
87
  else
85
88
  raise ArgumentError, "stdio or http is required"
86
89
  end
@@ -122,7 +125,7 @@ class LLM::MCP
122
125
  # Configures an HTTP MCP transport to use a persistent connection pool
123
126
  # via the optional dependency [Net::HTTP::Persistent](https://github.com/drbrain/net-http-persistent)
124
127
  # @example
125
- # mcp = LLM.mcp(http: {url: "https://example.com/mcp"}).persistent
128
+ # mcp = LLM::MCP.http(url: "https://example.com/mcp", persistent: true)
126
129
  # # do something with 'mcp'
127
130
  # @return [LLM::MCP]
128
131
  def persist!
@@ -4,6 +4,8 @@ class LLM::Object
4
4
  ##
5
5
  # @private
6
6
  module Kernel
7
+ TypeError = ::TypeError
8
+
7
9
  def tap(...)
8
10
  ::Kernel.instance_method(:tap).bind(self).call(...)
9
11
  end
@@ -26,11 +28,15 @@ class LLM::Object
26
28
  alias_method :is_a?, :kind_of?
27
29
 
28
30
  def respond_to?(m, include_private = false)
29
- !!key(m) || self.class.method_defined?(m)
31
+ !!SINGLETON.key(@h, m) || self.class.method_defined?(m)
30
32
  end
31
33
 
32
34
  def respond_to_missing?(m, include_private = false)
33
- !!key(m)
35
+ !!SINGLETON.key(@h, m)
36
+ end
37
+
38
+ def raise(...)
39
+ ::Kernel.raise(...)
34
40
  end
35
41
 
36
42
  def object_id
data/lib/llm/object.rb CHANGED
@@ -8,6 +8,37 @@ class LLM::Object < BasicObject
8
8
  require_relative "object/builder"
9
9
  require_relative "object/kernel"
10
10
 
11
+ SINGLETON = self
12
+ UNDEFINED = ::Object.new.freeze
13
+ LLM = ::LLM
14
+ private_constant :SINGLETON, :UNDEFINED, :LLM
15
+
16
+ ##
17
+ # @api private
18
+ # @param [Hash] h
19
+ # @param [#to_s, #to_sym] k
20
+ # @return [String, Symbol, nil]
21
+ def self.key(h, k)
22
+ return nil if k.nil?
23
+ if h.key?(k.to_s)
24
+ k.to_s
25
+ elsif h.key?(k.to_sym)
26
+ k.to_sym
27
+ else
28
+ nil
29
+ end
30
+ end
31
+
32
+ ##
33
+ # @api private
34
+ # @param [Hash] h
35
+ # @param [#to_s, #to_sym] k
36
+ # @return [Object, nil]
37
+ def self.get(h, k)
38
+ name = key(h, k)
39
+ h[name] if name
40
+ end
41
+
11
42
  extend Builder
12
43
  include Kernel
13
44
  include ::Enumerable
@@ -33,7 +64,7 @@ class LLM::Object < BasicObject
33
64
  # @param [Symbol, #to_sym] k
34
65
  # @return [Object]
35
66
  def [](k)
36
- @h[key(k)]
67
+ @h[SINGLETON.key(@h, k)]
37
68
  end
38
69
 
39
70
  ##
@@ -47,7 +78,7 @@ class LLM::Object < BasicObject
47
78
  ##
48
79
  # @return [String]
49
80
  def to_json(...)
50
- to_h.to_json(...)
81
+ LLM.json.dump(to_h, ...)
51
82
  end
52
83
 
53
84
  ##
@@ -83,16 +114,39 @@ class LLM::Object < BasicObject
83
114
  ##
84
115
  # @param [String, Symbol] k
85
116
  # @return [Boolean]
86
- def key?(k)
87
- @h.key?(key(k))
117
+ def key?(k = UNDEFINED)
118
+ return SINGLETON.get(@h, :key?) if k.equal?(UNDEFINED)
119
+ @h.key?(SINGLETON.key(@h, k))
88
120
  end
89
121
  alias_method :has_key?, :key?
90
122
 
91
123
  ##
92
124
  # @param [String, Symbol] k
93
125
  # @return [Object]
94
- def fetch(k, *args, &b)
95
- @h.fetch(key(k), *args, &b)
126
+ def fetch(k = UNDEFINED, *args, &b)
127
+ return SINGLETON.get(@h, :fetch) if k.equal?(UNDEFINED)
128
+ @h.fetch(SINGLETON.key(@h, k), *args, &b)
129
+ end
130
+
131
+ ##
132
+ # @param [Hash, to_h] other
133
+ # The hash to merge
134
+ # @return [LLM::Object]
135
+ # Returns a new LLM::Object
136
+ def merge(other = UNDEFINED)
137
+ return SINGLETON.get(@h, :merge) if other.equal?(UNDEFINED)
138
+ hash = ::Hash.try_convert(other)
139
+ raise TypeError, "#{other} cannot be coerced into a Hash" unless hash
140
+ SINGLETON.from @h.merge(hash)
141
+ end
142
+
143
+ ##
144
+ # @param [#to_s, #to_sym] k
145
+ # The key name
146
+ # @return [void]
147
+ def delete(k = UNDEFINED)
148
+ return SINGLETON.get(@h, :delete) if k.equal?(UNDEFINED)
149
+ @h.delete(SINGLETON.key(@h, k))
96
150
  end
97
151
 
98
152
  ##
@@ -110,14 +164,16 @@ class LLM::Object < BasicObject
110
164
 
111
165
  ##
112
166
  # @return [Object, nil]
113
- def dig(...)
114
- @h.dig(...)
167
+ def dig(*args)
168
+ return SINGLETON.get(@h, :dig) if args.empty?
169
+ @h.dig(*args)
115
170
  end
116
171
 
117
172
  ##
118
173
  # @return [Hash]
119
- def slice(...)
120
- @h.slice(...)
174
+ def slice(*args)
175
+ return SINGLETON.get(@h, :slice) if args.empty?
176
+ @h.slice(*args)
121
177
  end
122
178
 
123
179
  private
@@ -125,20 +181,10 @@ class LLM::Object < BasicObject
125
181
  def method_missing(m, *args, &b)
126
182
  if m.to_s.end_with?("=")
127
183
  self[m[0..-2]] = args.first
128
- elsif k = key(m)
184
+ elsif k = SINGLETON.key(@h, m)
129
185
  @h[k]
130
186
  else
131
187
  nil
132
188
  end
133
189
  end
134
-
135
- def key(k)
136
- if @h.key?(k.to_s)
137
- k.to_s
138
- elsif @h.key?(k.to_sym)
139
- k.to_sym
140
- else
141
- nil
142
- end
143
- end
144
190
  end