llm.rb 7.0.0 → 8.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +151 -1
  3. data/README.md +45 -25
  4. data/data/bedrock.json +2948 -0
  5. data/data/deepseek.json +8 -8
  6. data/data/openai.json +39 -2
  7. data/data/xai.json +35 -0
  8. data/data/zai.json +1 -1
  9. data/lib/llm/active_record/acts_as_agent.rb +2 -6
  10. data/lib/llm/active_record/acts_as_llm.rb +4 -82
  11. data/lib/llm/active_record.rb +80 -2
  12. data/lib/llm/agent.rb +9 -4
  13. data/lib/llm/error.rb +4 -0
  14. data/lib/llm/function/array.rb +7 -3
  15. data/lib/llm/function/fiber_group.rb +9 -3
  16. data/lib/llm/function/fork/job.rb +67 -0
  17. data/lib/llm/function/fork/task.rb +76 -0
  18. data/lib/llm/function/fork.rb +8 -0
  19. data/lib/llm/function/fork_group.rb +36 -0
  20. data/lib/llm/function/ractor/task.rb +13 -3
  21. data/lib/llm/function/task.rb +10 -2
  22. data/lib/llm/function.rb +24 -11
  23. data/lib/llm/mcp/command.rb +1 -1
  24. data/lib/llm/mcp/transport/http.rb +2 -2
  25. data/lib/llm/mcp.rb +7 -4
  26. data/lib/llm/object/kernel.rb +8 -2
  27. data/lib/llm/object.rb +75 -21
  28. data/lib/llm/{mcp/pipe.rb → pipe.rb} +9 -8
  29. data/lib/llm/provider/transport/http/execution.rb +1 -1
  30. data/lib/llm/provider/transport/http.rb +1 -1
  31. data/lib/llm/provider.rb +7 -0
  32. data/lib/llm/providers/bedrock/error_handler.rb +80 -0
  33. data/lib/llm/providers/bedrock/models.rb +109 -0
  34. data/lib/llm/providers/bedrock/request_adapter/completion.rb +153 -0
  35. data/lib/llm/providers/bedrock/request_adapter.rb +95 -0
  36. data/lib/llm/providers/bedrock/response_adapter/completion.rb +143 -0
  37. data/lib/llm/providers/bedrock/response_adapter/models.rb +34 -0
  38. data/lib/llm/providers/bedrock/response_adapter.rb +40 -0
  39. data/lib/llm/providers/bedrock/signature.rb +166 -0
  40. data/lib/llm/providers/bedrock/stream_decoder.rb +140 -0
  41. data/lib/llm/providers/bedrock/stream_parser.rb +201 -0
  42. data/lib/llm/providers/bedrock.rb +272 -0
  43. data/lib/llm/stream/queue.rb +1 -1
  44. data/lib/llm/version.rb +1 -1
  45. data/lib/llm.rb +27 -1
  46. data/llm.gemspec +2 -1
  47. metadata +33 -3
data/data/deepseek.json CHANGED
@@ -29,13 +29,13 @@
29
29
  },
30
30
  "open_weights": true,
31
31
  "cost": {
32
- "input": 0.28,
33
- "output": 0.42,
32
+ "input": 0.14,
33
+ "output": 0.28,
34
34
  "cache_read": 0.028
35
35
  },
36
36
  "limit": {
37
- "context": 131072,
38
- "output": 8192
37
+ "context": 1000000,
38
+ "output": 384000
39
39
  }
40
40
  },
41
41
  "deepseek-reasoner": {
@@ -62,13 +62,13 @@
62
62
  },
63
63
  "open_weights": true,
64
64
  "cost": {
65
- "input": 0.28,
66
- "output": 0.42,
65
+ "input": 0.14,
66
+ "output": 0.28,
67
67
  "cache_read": 0.028
68
68
  },
69
69
  "limit": {
70
- "context": 128000,
71
- "output": 64000
70
+ "context": 1000000,
71
+ "output": 384000
72
72
  }
73
73
  },
74
74
  "deepseek-v4-flash": {
data/data/openai.json CHANGED
@@ -230,8 +230,8 @@
230
230
  },
231
231
  "limit": {
232
232
  "context": 1050000,
233
- "input": 920000,
234
- "output": 130000
233
+ "input": 922000,
234
+ "output": 128000
235
235
  },
236
236
  "experimental": {
237
237
  "modes": {
@@ -1554,6 +1554,43 @@
1554
1554
  "output": 0
1555
1555
  }
1556
1556
  },
1557
+ "gpt-5.5-pro": {
1558
+ "id": "gpt-5.5-pro",
1559
+ "name": "GPT-5.5 Pro",
1560
+ "family": "gpt-pro",
1561
+ "attachment": true,
1562
+ "reasoning": true,
1563
+ "tool_call": true,
1564
+ "structured_output": true,
1565
+ "temperature": false,
1566
+ "knowledge": "2025-12-01",
1567
+ "release_date": "2026-04-23",
1568
+ "last_updated": "2026-04-23",
1569
+ "modalities": {
1570
+ "input": [
1571
+ "text",
1572
+ "image",
1573
+ "pdf"
1574
+ ],
1575
+ "output": [
1576
+ "text"
1577
+ ]
1578
+ },
1579
+ "open_weights": false,
1580
+ "cost": {
1581
+ "input": 30,
1582
+ "output": 180,
1583
+ "context_over_200k": {
1584
+ "input": 60,
1585
+ "output": 270
1586
+ }
1587
+ },
1588
+ "limit": {
1589
+ "context": 1050000,
1590
+ "input": 922000,
1591
+ "output": 128000
1592
+ }
1593
+ },
1557
1594
  "gpt-4.1": {
1558
1595
  "id": "gpt-4.1",
1559
1596
  "name": "GPT-4.1",
data/data/xai.json CHANGED
@@ -68,6 +68,41 @@
68
68
  "output": 4096
69
69
  }
70
70
  },
71
+ "grok-4.3": {
72
+ "id": "grok-4.3",
73
+ "name": "Grok 4.3",
74
+ "family": "grok",
75
+ "attachment": true,
76
+ "reasoning": true,
77
+ "tool_call": true,
78
+ "temperature": true,
79
+ "release_date": "2026-05-01",
80
+ "last_updated": "2026-05-01",
81
+ "modalities": {
82
+ "input": [
83
+ "text",
84
+ "image"
85
+ ],
86
+ "output": [
87
+ "text"
88
+ ]
89
+ },
90
+ "open_weights": false,
91
+ "cost": {
92
+ "input": 1.25,
93
+ "output": 2.5,
94
+ "cache_read": 0.2,
95
+ "context_over_200k": {
96
+ "input": 2.5,
97
+ "output": 5,
98
+ "cache_read": 0.4
99
+ }
100
+ },
101
+ "limit": {
102
+ "context": 1000000,
103
+ "output": 30000
104
+ }
105
+ },
71
106
  "grok-3-mini-fast": {
72
107
  "id": "grok-3-mini-fast",
73
108
  "name": "Grok 3 Mini Fast",
data/data/zai.json CHANGED
@@ -10,7 +10,7 @@
10
10
  "models": {
11
11
  "glm-5v-turbo": {
12
12
  "id": "glm-5v-turbo",
13
- "name": "glm-5v-turbo",
13
+ "name": "GLM-5V-Turbo",
14
14
  "family": "glm",
15
15
  "attachment": true,
16
16
  "reasoning": true,
@@ -10,10 +10,6 @@ module LLM::ActiveRecord
10
10
  # tools, schema, instructions, and concurrency are configured on the model
11
11
  # class and forwarded to an internal agent subclass.
12
12
  module ActsAsAgent
13
- EMPTY_HASH = LLM::ActiveRecord::ActsAsLLM::EMPTY_HASH
14
- DEFAULTS = LLM::ActiveRecord::ActsAsLLM::DEFAULTS
15
- Utils = LLM::ActiveRecord::ActsAsLLM::Utils
16
-
17
13
  module ClassMethods
18
14
  def model(model = nil)
19
15
  return agent.model if model.nil?
@@ -96,7 +92,7 @@ module LLM::ActiveRecord
96
92
  def llm
97
93
  options = self.class.llm_plugin_options
98
94
  return @llm if @llm
99
- @llm = Utils.resolve_provider(self, options, ActsAsAgent::EMPTY_HASH)
95
+ @llm = Utils.resolve_provider(self, options, EMPTY_HASH)
100
96
  @llm.tracer = Utils.resolve_option(self, options[:tracer]) if options[:tracer]
101
97
  @llm
102
98
  end
@@ -108,7 +104,7 @@ module LLM::ActiveRecord
108
104
  def ctx
109
105
  @ctx ||= begin
110
106
  options = self.class.llm_plugin_options
111
- params = Utils.resolve_options(self, options[:context], ActsAsAgent::EMPTY_HASH).dup
107
+ params = Utils.resolve_options(self, options[:context], EMPTY_HASH).dup
112
108
  ctx = self.class.agent.new(llm, params.compact)
113
109
  columns = Utils.columns(options)
114
110
  data = self[columns[:data_column]]
@@ -16,84 +16,6 @@ module LLM::ActiveRecord
16
16
  # handling JSON typecasting for the model. `provider:`, `context:`, and
17
17
  # `tracer:` can also be configured as symbols that are called on the model.
18
18
  module ActsAsLLM
19
- EMPTY_HASH = {}.freeze
20
- DEFAULTS = {
21
- data_column: :data,
22
- format: :string,
23
- tracer: nil,
24
- provider: nil,
25
- context: EMPTY_HASH
26
- }.freeze
27
-
28
- ##
29
- # Shared helper methods for the ORM wrapper.
30
- #
31
- # These utilities keep persistence plumbing out of the wrapped model's
32
- # method namespace so the injected surface stays focused on the runtime
33
- # API itself.
34
- # @api private
35
- module Utils
36
- ##
37
- # Resolves a single configured option against a model instance.
38
- # @return [Object]
39
- def self.resolve_option(obj, option)
40
- case option
41
- when Proc then obj.instance_exec(&option)
42
- when Symbol then obj.send(option)
43
- when Hash then option.dup
44
- else option
45
- end
46
- end
47
-
48
- ##
49
- # Resolves hash-like wrapper options against a model instance.
50
- # @return [Hash]
51
- def self.resolve_options(obj, option, empty_hash)
52
- case option
53
- when Proc, Symbol, Hash then resolve_option(obj, option)
54
- else empty_hash.dup
55
- end
56
- end
57
-
58
- ##
59
- # Serializes the runtime into the configured storage format.
60
- # @return [String, Hash]
61
- def self.serialize_context(ctx, format)
62
- case format
63
- when :string then ctx.to_json
64
- when :json, :jsonb then ctx.to_h
65
- else raise ArgumentError, "Unknown format: #{format.inspect}"
66
- end
67
- end
68
-
69
- ##
70
- # Maps wrapper options onto the record's storage columns.
71
- # @return [Hash]
72
- def self.columns(options)
73
- {
74
- data_column: options[:data_column]
75
- }.freeze
76
- end
77
-
78
- ##
79
- # Resolves the provider runtime for a record.
80
- # @return [LLM::Provider]
81
- def self.resolve_provider(obj, options, empty_hash)
82
- provider = resolve_option(obj, options[:provider])
83
- return provider if LLM::Provider === provider
84
- raise ArgumentError, "provider: must resolve to an LLM::Provider instance"
85
- end
86
-
87
- ##
88
- # Persists the runtime state and usage columns back onto the record.
89
- # @return [void]
90
- def self.save(obj, ctx, options)
91
- columns = self.columns(options)
92
- obj.assign_attributes(columns[:data_column] => serialize_context(ctx, options[:format]))
93
- obj.save!
94
- end
95
- end
96
-
97
19
  module Hooks
98
20
  ##
99
21
  # Called when hooks are extended onto an ActiveRecord model.
@@ -133,7 +55,7 @@ module LLM::ActiveRecord
133
55
  # @return [LLM::Response]
134
56
  def talk(...)
135
57
  options = self.class.llm_plugin_options
136
- ctx.talk(...).tap { Utils.save(self, ctx, options) }
58
+ ctx.talk(...).tap { Utils.save!(self, ctx, options) }
137
59
  end
138
60
 
139
61
  ##
@@ -142,7 +64,7 @@ module LLM::ActiveRecord
142
64
  # @return [LLM::Response]
143
65
  def respond(...)
144
66
  options = self.class.llm_plugin_options
145
- ctx.respond(...).tap { Utils.save(self, ctx, options) }
67
+ ctx.respond(...).tap { Utils.save!(self, ctx, options) }
146
68
  end
147
69
 
148
70
  ##
@@ -270,7 +192,7 @@ module LLM::ActiveRecord
270
192
  def llm
271
193
  options = self.class.llm_plugin_options
272
194
  return @llm if @llm
273
- @llm = Utils.resolve_provider(self, options, ActsAsLLM::EMPTY_HASH)
195
+ @llm = Utils.resolve_provider(self, options, EMPTY_HASH)
274
196
  @llm.tracer = Utils.resolve_option(self, options[:tracer]) if options[:tracer]
275
197
  @llm
276
198
  end
@@ -283,7 +205,7 @@ module LLM::ActiveRecord
283
205
  @ctx ||= begin
284
206
  options = self.class.llm_plugin_options
285
207
  columns = Utils.columns(options)
286
- params = Utils.resolve_options(self, options[:context], ActsAsLLM::EMPTY_HASH).dup
208
+ params = Utils.resolve_options(self, options[:context], EMPTY_HASH).dup
287
209
  ctx = LLM::Context.new(llm, params.compact)
288
210
  data = self[columns[:data_column]]
289
211
  if data.nil? || data == ""
@@ -1,4 +1,82 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require "llm/active_record/acts_as_llm"
4
- require "llm/active_record/acts_as_agent"
3
+ module LLM::ActiveRecord
4
+ EMPTY_HASH = {}.freeze
5
+ DEFAULTS = {
6
+ data_column: :data,
7
+ format: :string,
8
+ tracer: nil,
9
+ provider: nil,
10
+ context: EMPTY_HASH
11
+ }.freeze
12
+
13
+ ##
14
+ # These utilities keep persistence plumbing out of the wrapped model's
15
+ # method namespace so the injected surface stays focused on the runtime
16
+ # API itself.
17
+ # @api private
18
+ module Utils
19
+ ##
20
+ # Resolves a single configured option against a model instance.
21
+ # @return [Object]
22
+ def self.resolve_option(obj, option)
23
+ case option
24
+ when Proc then obj.instance_exec(&option)
25
+ when Symbol then obj.send(option)
26
+ when Hash then option.dup
27
+ else option
28
+ end
29
+ end
30
+
31
+ ##
32
+ # Resolves hash-like wrapper options against a model instance.
33
+ # @return [Hash]
34
+ def self.resolve_options(obj, option, empty_hash)
35
+ case option
36
+ when Proc, Symbol, Hash then resolve_option(obj, option)
37
+ else empty_hash.dup
38
+ end
39
+ end
40
+
41
+ ##
42
+ # Serializes the runtime into the configured storage format.
43
+ # @return [String, Hash]
44
+ def self.serialize_context(ctx, format)
45
+ case format
46
+ when :string then ctx.to_json
47
+ when :json, :jsonb then ctx.to_h
48
+ else raise ArgumentError, "Unknown format: #{format.inspect}"
49
+ end
50
+ end
51
+
52
+ ##
53
+ # Maps wrapper options onto the record's storage columns.
54
+ # @return [Hash]
55
+ def self.columns(options)
56
+ {
57
+ data_column: options[:data_column]
58
+ }.freeze
59
+ end
60
+
61
+ ##
62
+ # Resolves the provider runtime for a record.
63
+ # @return [LLM::Provider]
64
+ def self.resolve_provider(obj, options, empty_hash)
65
+ provider = resolve_option(obj, options[:provider])
66
+ return provider if LLM::Provider === provider
67
+ raise ArgumentError, "provider: must resolve to an LLM::Provider instance"
68
+ end
69
+
70
+ ##
71
+ # Persists the runtime state and usage columns back onto the record.
72
+ # @return [void]
73
+ def self.save!(obj, ctx, options)
74
+ columns = self.columns(options)
75
+ obj.assign_attributes(columns[:data_column] => serialize_context(ctx, options[:format]))
76
+ obj.save!
77
+ end
78
+ end
79
+
80
+ require "llm/active_record/acts_as_llm"
81
+ require "llm/active_record/acts_as_agent"
82
+ end
data/lib/llm/agent.rb CHANGED
@@ -106,7 +106,8 @@ module LLM
106
106
  # - `:call`: sequential calls
107
107
  # - `:thread`: concurrent threads
108
108
  # - `:task`: concurrent async tasks
109
- # - `:fiber`: concurrent raw fibers
109
+ # - `:fiber`: concurrent scheduler-backed fibers
110
+ # - `:fork`: forked child processes
110
111
  # - `:ractor`: concurrent Ruby ractors for class-based tools; MCP tools are not supported,
111
112
  # and this mode is especially useful for CPU-bound tool work
112
113
  # - `[:thread, :ractor]`: the possible concurrency strategies to wait on, in the
@@ -149,12 +150,14 @@ module LLM
149
150
  # @option params [Array<LLM::Function>, nil] :tools Defaults to nil
150
151
  # @option params [Array<String>, nil] :skills Defaults to nil
151
152
  # @option params [#to_json, nil] :schema Defaults to nil
153
+ # @option params [LLM::Tracer, Proc, nil] :tracer Optional tracer override for this agent instance
152
154
  # @option params [Symbol, Array<Symbol>, nil] :concurrency Defaults to the agent class concurrency
153
155
  def initialize(llm, params = {})
154
156
  defaults = {model: self.class.model, tools: self.class.tools, skills: self.class.skills, schema: self.class.schema}.compact
155
157
  @concurrency = params.delete(:concurrency) || self.class.concurrency
156
158
  @llm = llm
157
- @tracer = resolve_option(self.class.tracer) unless self.class.tracer.nil?
159
+ tracer = params.key?(:tracer) ? params.delete(:tracer) : self.class.tracer
160
+ @tracer = resolve_option(tracer) unless tracer.nil?
158
161
  @ctx = LLM::Context.new(llm, defaults.merge({guard: true}).merge(params))
159
162
  end
160
163
 
@@ -395,8 +398,10 @@ module LLM
395
398
  def call_functions
396
399
  case concurrency || :call
397
400
  when :call then call(:functions)
398
- when :thread, :task, :fiber, :ractor, Array then wait(concurrency)
399
- else raise ArgumentError, "Unknown concurrency: #{concurrency.inspect}. Expected :call, :thread, :task, :fiber, :ractor, or an array of queued task types"
401
+ when :thread, :task, :fiber, :fork, :ractor, Array then wait(concurrency)
402
+ else raise ArgumentError, "Unknown concurrency: #{concurrency.inspect}. " \
403
+ "Expected :call, :thread, :task, :fiber, :fork, :ractor, " \
404
+ "or an array of the mentioned options"
400
405
  end
401
406
  end
402
407
 
data/lib/llm/error.rb CHANGED
@@ -78,4 +78,8 @@ module LLM
78
78
  ##
79
79
  # When {LLM::Registry} can't map a registry
80
80
  NoSuchRegistryError = Class.new(Error)
81
+
82
+ ##
83
+ # When an optional runtime dependency cannot be required
84
+ LoadError = Class.new(Error)
81
85
  end
@@ -26,7 +26,8 @@ class LLM::Function
26
26
  # Controls concurrency strategy:
27
27
  # - `:thread`: Use threads
28
28
  # - `:task`: Use async tasks (requires async gem)
29
- # - `:fiber`: Use raw fibers
29
+ # - `:fiber`: Use scheduler-backed fibers (requires Fiber.scheduler)
30
+ # - `:fork`: Use forked child processes
30
31
  # - `:ractor`: Use Ruby ractors (class-based tools only; MCP tools are not supported)
31
32
  #
32
33
  # @return [LLM::Function::ThreadGroup, LLM::Function::TaskGroup, LLM::Function::FiberGroup, LLM::Function::Fork::Group, LLM::Function::Ractor::Group]
@@ -38,10 +39,12 @@ class LLM::Function
38
39
  ThreadGroup.new(map { |fn| fn.spawn(:thread) })
39
40
  when :fiber
40
41
  FiberGroup.new(map { |fn| fn.spawn(:fiber) })
42
+ when :fork
43
+ Fork::Group.new(map { |fn| fn.spawn(:fork) })
41
44
  when :ractor
42
45
  Ractor::Group.new(map { |fn| fn.spawn(:ractor) })
43
46
  else
44
- raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, or :ractor"
47
+ raise ArgumentError, "Unknown strategy: #{strategy.inspect}. Expected :thread, :task, :fiber, :fork, or :ractor"
45
48
  end
46
49
  end
47
50
 
@@ -53,7 +56,8 @@ class LLM::Function
53
56
  # Controls concurrency strategy:
54
57
  # - `:thread`: Use threads
55
58
  # - `:task`: Use async tasks (requires async gem)
56
- # - `:fiber`: Use raw fibers
59
+ # - `:fiber`: Use scheduler-backed fibers (requires Fiber.scheduler)
60
+ # - `:fork`: Use forked child processes
57
61
  # - `:ractor`: Use Ruby ractors (class-based tools only; MCP tools are not supported)
58
62
  #
59
63
  # @return [Array<LLM::Function::Return>]
@@ -4,10 +4,10 @@ class LLM::Function
4
4
  ##
5
5
  # The {LLM::Function::FiberGroup} class wraps an array of
6
6
  # {Fiber} objects that are running {LLM::Function} calls
7
- # concurrently using raw fibers.
7
+ # concurrently using scheduler-backed fibers.
8
8
  #
9
9
  # This class provides the same interface as {LLM::Function::ThreadGroup}
10
- # but uses raw fibers for lightweight concurrency without the async gem.
10
+ # but uses scheduler-backed fibers for cooperative concurrency.
11
11
  #
12
12
  # @example
13
13
  # llm = LLM.openai(key: ENV["KEY"])
@@ -90,10 +90,16 @@ class LLM::Function
90
90
  # order as the original fibers.
91
91
  def wait
92
92
  @fibers.map do |fiber|
93
- fiber.resume if fiber.alive?
93
+ fiber.alive? ? scheduler.run : nil
94
94
  fiber.value
95
95
  end
96
96
  end
97
97
  alias_method :value, :wait
98
+
99
+ private
100
+
101
+ def scheduler
102
+ Fiber.scheduler
103
+ end
98
104
  end
99
105
  end
@@ -0,0 +1,67 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Job} class represents a single fork-backed
6
+ # function call inside the child process.
7
+ #
8
+ # It is executed in the forked process and is responsible for running the
9
+ # resolved tool instance, handling control messages such as interrupts, and
10
+ # writing the final result back to the parent process.
11
+ class Fork::Job
12
+ ##
13
+ # @param [LLM::Function] function
14
+ # @param [LLM::Object] ch
15
+ # @return [LLM::Function::Fork::Job]
16
+ def initialize(function, ch)
17
+ @function = function
18
+ @ch = ch
19
+ end
20
+
21
+ ##
22
+ # @return [void]
23
+ def call
24
+ runner = @function.runner
25
+ controller = setup(runner)
26
+ @ch.result.write([:result, call!(runner)])
27
+ rescue => ex
28
+ @ch.result.write([:result, error(ex)])
29
+ ensure
30
+ controller&.kill
31
+ [@ch.control, @ch.result].each { _1.close unless _1.closed? }
32
+ end
33
+
34
+ private
35
+
36
+ def call!(runner)
37
+ kwargs = if Hash === @function.arguments
38
+ @function.arguments.transform_keys(&:to_sym)
39
+ else
40
+ @function.arguments
41
+ end
42
+ {id: @function.id, name: @function.name, value: runner.call(**kwargs)}
43
+ end
44
+
45
+ def error(ex)
46
+ {
47
+ id: @function.id,
48
+ name: @function.name,
49
+ value: {error: true, type: ex.class.name, message: ex.message}
50
+ }
51
+ end
52
+
53
+ def setup(runner)
54
+ ready = Queue.new
55
+ thread = Thread.new do
56
+ ready << true
57
+ kind = @ch.control.recv
58
+ next unless kind == :interrupt
59
+ hook = %i[on_cancel on_interrupt].find { runner.respond_to?(_1) }
60
+ runner.public_send(hook) if hook
61
+ rescue IOError, ArgumentError
62
+ end
63
+ ready.pop
64
+ thread
65
+ end
66
+ end
67
+ end
@@ -0,0 +1,76 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Task} class wraps a fork-backed function call
6
+ # and exchanges control and result messages with the child process.
7
+ class Fork::Task
8
+ ##
9
+ # @param [LLM::Function] function
10
+ # @param [LLM::Tracer, nil] tracer
11
+ # @param [Object, nil] span
12
+ # @return [LLM::Function::Fork::Task]
13
+ def initialize(function, tracer: nil, span: nil)
14
+ @function = function
15
+ @tracer = tracer
16
+ @span = span
17
+ @waited = false
18
+ end
19
+
20
+ ##
21
+ # @return [LLM::Function::Fork::Task]
22
+ def spawn
23
+ @ch = LLM::Object.from(control: xchan(:marshal), result: xchan(:marshal))
24
+ @pid = Kernel.fork { Fork::Job.new(@function, @ch).call }
25
+ self
26
+ end
27
+
28
+ ##
29
+ # @return [Boolean]
30
+ def alive?
31
+ return false if @waited
32
+ result = ::Process.waitpid(@pid, ::Process::WNOHANG)
33
+ @waited = !result.nil?
34
+ !@waited
35
+ rescue Errno::ECHILD
36
+ @waited = true
37
+ false
38
+ end
39
+
40
+ ##
41
+ # @return [nil]
42
+ def interrupt!
43
+ return nil if @waited
44
+ @ch.control.write(:interrupt)
45
+ nil
46
+ rescue Errno::ESRCH, IOError
47
+ nil
48
+ end
49
+ alias_method :cancel!, :interrupt!
50
+
51
+ ##
52
+ # @return [LLM::Function::Return]
53
+ def wait
54
+ kind, data = @ch.result.recv
55
+ raise ArgumentError, "Unknown fork message: #{kind.inspect}" unless kind == :result
56
+ result = Return.new(data[:id], data[:name], data[:value])
57
+ reap
58
+ @tracer&.on_tool_finish(result:, span: @span)
59
+ result
60
+ ensure
61
+ reap
62
+ [@ch.control, @ch.result].each { _1.close unless _1.closed? }
63
+ end
64
+ alias_method :value, :wait
65
+
66
+ private
67
+
68
+ def reap
69
+ return if @waited
70
+ ::Process.waitpid(@pid)
71
+ @waited = true
72
+ rescue Errno::ECHILD
73
+ @waited = true
74
+ end
75
+ end
76
+ end
@@ -0,0 +1,8 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ module Fork
5
+ require_relative "fork/job"
6
+ require_relative "fork/task"
7
+ end
8
+ end
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Function
4
+ ##
5
+ # The {LLM::Function::Fork::Group} class wraps an array of
6
+ # {LLM::Function::Fork::Task} objects that are running in forked child processes.
7
+ class Fork::Group
8
+ ##
9
+ # @param [Array<LLM::Function::Fork::Task>] tasks
10
+ # @return [LLM::Function::Fork::Group]
11
+ def initialize(tasks)
12
+ @tasks = tasks
13
+ end
14
+
15
+ ##
16
+ # @return [Boolean]
17
+ def alive?
18
+ @tasks.any?(&:alive?)
19
+ end
20
+
21
+ ##
22
+ # @return [nil]
23
+ def interrupt!
24
+ @tasks.each(&:interrupt!)
25
+ nil
26
+ end
27
+ alias_method :cancel!, :interrupt!
28
+
29
+ ##
30
+ # @return [Array<LLM::Function::Return>]
31
+ def wait
32
+ @tasks.map(&:wait)
33
+ end
34
+ alias_method :value, :wait
35
+ end
36
+ end