llm.rb 4.18.0 → 4.20.0

This diff represents the changes between publicly available package versions as published to one of the supported public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 42357f605b8995c24722c0a2d285d98de0e99fe6f9687e2f4451339dcfbc2e87
4
- data.tar.gz: 2953c742deeec6dc7c7acc9ed77f4520846a92299a4df94e415416d5c623033c
3
+ metadata.gz: d57e4d2af8568cdd6328c0a956fddb40aecbd2943a268b595dbd87ee811553a4
4
+ data.tar.gz: 8cf576171c3bfd7328b8316d42aecbb364a7e4d0a6bbff707cdc65cc9ddfbd01
5
5
  SHA512:
6
- metadata.gz: 534fad616562768b0a851142f411117634d824a92187577d6baad4427c1e0eddf7a765dbfe3b662f76ba52011d760803793473a1881b4f69b8f4c7f9fc16bdc3
7
- data.tar.gz: f1d90ad7f7b71f4817fcfa494f48a4e7f189560674d7b86d618f1f4e217e2a34189e15156e71ecdcfffe81e9588315f4759a82f4ec3eaed3846a29c77a906a66
6
+ metadata.gz: 4d9087909b30c47e5ddb9c9407b53efdbbd2a3732629579dfd53415d60e1457a56b738b9942b578434888b97ee597d78955f21a1af5235847e6daa944810e8d7
7
+ data.tar.gz: a890e08d0129ccfa18188efb503e8ac32e4d5424a79851bf2bfa15424b713cae4431afeb78fe7645437d8b3d0c11cc2d975d7415279a2880ca5cd557de57cf5f
data/CHANGELOG.md CHANGED
@@ -2,8 +2,59 @@
2
2
 
3
3
  ## Unreleased
4
4
 
5
+ Changes since `v4.20.0`.
6
+
7
+ ## v4.20.0
8
+
9
+ Changes since `v4.19.0`.
10
+
11
+ This release adds better support for tagged prompt content. `LLM::Context`
12
+ can now serialize and restore `image_url`, `local_file`, and `remote_file`
13
+ content cleanly, and `LLM::Message` now exposes helpers for inspecting
14
+ tagged image and file attachments.
15
+
16
+ ### Change
17
+
18
+ * **Round-trip tagged prompt objects through `LLM::Context`** <br>
19
+ Teach `LLM::Context` serialization and restore to preserve
20
+ `image_url`, `local_file`, and `remote_file` content across
21
+ `to_json` / `restore`.
22
+
23
+ * **Add attachment helpers to `LLM::Message`** <br>
24
+ Add `image_url?`, `image_urls`, `file?`, and `files` so callers can
25
+ inspect messages for tagged image and file content more directly.
26
+
27
+ ## v4.19.0
28
+
5
29
  Changes since `v4.18.0`.
6
30
 
31
+ This release tightens the ActiveRecord and ORM integration layer. It adds
32
+ inline agent DSL blocks to `acts_as_agent` so agent defaults can be defined
33
+ where the wrapper is declared, and it exposes the resolved provider through
34
+ public `llm` methods on the ActiveRecord and Sequel wrappers.
35
+
36
+ ### Change
37
+
38
+ * **Make ORM provider access public through `llm`** <br>
39
+ Expose the resolved provider on the Sequel plugin and the ActiveRecord
40
+ `acts_as_llm` / `acts_as_agent` wrappers through a public `llm` method.
41
+
42
+ * **Allow inline agent DSL blocks in `acts_as_agent`** <br>
43
+ Let ActiveRecord models configure `model`, `tools`, `schema`,
44
+ `instructions`, and `concurrency` directly inside the `acts_as_agent`
45
+ declaration block.
46
+
47
+ ## v4.18.0
48
+
49
+ Changes since `v4.17.0`.
50
+
51
+ This release improves tracing and tool execution behavior across llm.rb.
52
+ It makes provider tracers default to the provider instance, adds
53
+ `LLM::Provider#with_tracer` for scoped overrides, restores tool tracing for
54
+ concurrent and streamed tool execution, extends streamed tracing to MCP tools,
55
+ and adds symbol-based ORM option hooks alongside experimental ractor tool
56
+ concurrency.
57
+
7
58
  ### Change
8
59
 
9
60
  * **Make provider tracers default to the provider instance** <br>
@@ -42,17 +93,6 @@ Changes since `v4.18.0`.
42
93
  is especially useful for CPU-bound tools, while `:task`, `:fiber`, or
43
94
  `:thread` may be a better fit for I/O-bound work.
44
95
 
45
- ## v4.18.0
46
-
47
- Changes since `v4.17.0`.
48
-
49
- This release improves tracing and tool execution behavior across llm.rb.
50
- It makes provider tracers default to the provider instance, adds
51
- `LLM::Provider#with_tracer` for scoped overrides, restores tool tracing for
52
- concurrent and streamed tool execution, extends streamed tracing to MCP tools,
53
- and adds symbol-based ORM option hooks alongside experimental ractor tool
54
- concurrency.
55
-
56
96
  ## v4.17.0
57
97
 
58
98
  Changes since `v4.16.1`.
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  <p align="center">
5
5
  <a href="https://0x1eef.github.io/x/llm.rb?rebuild=1"><img src="https://img.shields.io/badge/docs-0x1eef.github.io-blue.svg" alt="RubyDoc"></a>
6
6
  <a href="https://opensource.org/license/0bsd"><img src="https://img.shields.io/badge/License-0BSD-orange.svg?" alt="License"></a>
7
- <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.18.0-green.svg?" alt="Version"></a>
7
+ <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.20.0-green.svg?" alt="Version"></a>
8
8
  </p>
9
9
 
10
10
  ## About
@@ -254,11 +254,18 @@ require "active_record"
254
254
  require "llm/active_record"
255
255
 
256
256
  class Ticket < ApplicationRecord
257
- acts_as_agent provider: -> { { key: ENV["#{provider.upcase}_SECRET"], persistent: true } }
258
- model "gpt-5.4-mini"
259
- instructions "You are a concise support assistant."
260
- tools SearchDocs, Escalate
261
- concurrency :thread
257
+ acts_as_agent provider: :set_provider do
258
+ model "gpt-5.4-mini"
259
+ instructions "You are a concise support assistant."
260
+ tools SearchDocs, Escalate
261
+ concurrency :thread
262
+ end
263
+
264
+ private
265
+
266
+ def set_provider
267
+ { key: ENV["#{provider.upcase}_SECRET"], persistent: true }
268
+ end
262
269
  end
263
270
 
264
271
  ticket = Ticket.create!(provider: "openai", model: "gpt-5.4-mini")
@@ -71,18 +71,21 @@ module LLM::ActiveRecord
71
71
  # @option options [Proc, Symbol, LLM::Tracer, nil] :tracer
72
72
  # Optional tracer, method name, or proc that resolves to one and is
73
73
  # assigned through `llm.tracer = ...` on the resolved provider.
74
+ # @yield
75
+ # Evaluated in the model class after the wrapper is installed, so agent
76
+ # DSL methods such as `model`, `tools`, `schema`, `instructions`, and
77
+ # `concurrency` can be configured inline.
74
78
  # @return [void]
75
- def acts_as_agent(options = EMPTY_HASH)
79
+ def acts_as_agent(options = EMPTY_HASH, &block)
76
80
  options = DEFAULTS.merge(options)
77
81
  usage_columns = DEFAULT_USAGE_COLUMNS.merge(options[:usage_columns] || EMPTY_HASH)
78
82
  class_attribute :llm_agent_options, instance_accessor: false, default: DEFAULTS unless respond_to?(:llm_agent_options)
79
83
  self.llm_agent_options = options.merge(usage_columns: usage_columns.freeze).freeze
80
84
  extend Hooks
85
+ class_exec(&block) if block
81
86
  end
82
87
 
83
88
  module InstanceMethods
84
- private
85
-
86
89
  ##
87
90
  # Returns the resolved provider instance for this record.
88
91
  # @return [LLM::Provider]
@@ -96,6 +99,8 @@ module LLM::ActiveRecord
96
99
  @llm
97
100
  end
98
101
 
102
+ private
103
+
99
104
  ##
100
105
  # @return [LLM::Agent]
101
106
  def ctx
@@ -206,8 +206,6 @@ module LLM::ActiveRecord
206
206
  ctx.tracer
207
207
  end
208
208
 
209
- private
210
-
211
209
  ##
212
210
  # Returns the resolved provider instance for this record.
213
211
  # @return [LLM::Provider]
@@ -221,6 +219,8 @@ module LLM::ActiveRecord
221
219
  @llm
222
220
  end
223
221
 
222
+ private
223
+
224
224
  ##
225
225
  # @return [LLM::Context]
226
226
  def ctx
@@ -4,6 +4,32 @@ class LLM::Context
4
4
  ##
5
5
  # @api private
6
6
  module Deserializer
7
+ ##
8
+ # Restore a saved context state
9
+ # @param [String, nil] path
10
+ # The path to a JSON file
11
+ # @param [String, nil] string
12
+ # A raw JSON string
13
+ # @param [Hash, nil] data
14
+ # A parsed context payload
15
+ # @raise [SystemCallError]
16
+ # Might raise a number of SystemCallError subclasses
17
+ # @return [LLM::Context]
18
+ def deserialize(path: nil, string: nil, data: nil)
19
+ ctx = if data
20
+ data
21
+ elsif path.nil? and string.nil?
22
+ raise ArgumentError, "a path, string, or data payload is required"
23
+ elsif path
24
+ LLM.json.load(::File.binread(path))
25
+ else
26
+ LLM.json.load(string)
27
+ end
28
+ @messages.concat [*ctx["messages"]].map { deserialize_message(_1) }
29
+ self
30
+ end
31
+ alias_method :restore, :deserialize
32
+
7
33
  ##
8
34
  # @param [Hash] payload
9
35
  # @return [LLM::Message]
@@ -14,12 +40,36 @@ class LLM::Context
14
40
  usage = payload["usage"]
15
41
  reasoning_content = payload["reasoning_content"]
16
42
  extra = {tool_calls:, original_tool_calls:, tools: @params[:tools], usage:, reasoning_content:}.compact
17
- content = returns.nil? ? payload["content"] : returns
43
+ content = returns.nil? ? deserialize_content(payload["content"]) : returns
18
44
  LLM::Message.new(payload["role"], content, extra)
19
45
  end
20
46
 
21
47
  private
22
48
 
49
+ def deserialize_content(content)
50
+ case content
51
+ when Array
52
+ content.map { deserialize_content(_1) }
53
+ when Hash
54
+ deserialize_object(content)
55
+ else
56
+ content
57
+ end
58
+ end
59
+
60
+ def deserialize_object(object)
61
+ case object["__llm_kind__"]
62
+ when "image_url"
63
+ LLM::Object.from(value: object["value"], kind: :image_url)
64
+ when "local_file"
65
+ LLM::Object.from(value: LLM.File(object["path"]), kind: :local_file)
66
+ when "remote_file"
67
+ LLM::Object.from(value: LLM::Object.from(object["value"] || {}), kind: :remote_file)
68
+ else
69
+ object
70
+ end
71
+ end
72
+
23
73
  def deserialize_tool_calls(items)
24
74
  items ||= []
25
75
  items.empty? ? nil : items
@@ -0,0 +1,52 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Context
4
+ ##
5
+ # @api private
6
+ module Serializer
7
+ private
8
+
9
+ def serialize_message(message)
10
+ h = message.to_h
11
+ h[:content] = serialize_content(h[:content])
12
+ h
13
+ end
14
+
15
+ def serialize_content(content)
16
+ case content
17
+ when Array
18
+ content.map { serialize_content(_1) }
19
+ when LLM::Object
20
+ serialize_object(content)
21
+ else
22
+ content
23
+ end
24
+ end
25
+
26
+ def serialize_object(object)
27
+ case object.kind
28
+ when :image_url
29
+ {__llm_kind__: "image_url", value: object.value}
30
+ when :local_file
31
+ {__llm_kind__: "local_file", path: object.value.path}
32
+ when :remote_file
33
+ {__llm_kind__: "remote_file", value: serialize_remote_file(object.value)}
34
+ else
35
+ object.to_h
36
+ end
37
+ end
38
+
39
+ def serialize_remote_file(file)
40
+ {
41
+ "file?" => file.respond_to?(:file?) ? file.file? : true,
42
+ "id" => (file.id if file.respond_to?(:id)),
43
+ "filename" => (file.filename if file.respond_to?(:filename)),
44
+ "mime_type" => (file.mime_type if file.respond_to?(:mime_type)),
45
+ "uri" => (file.uri if file.respond_to?(:uri)),
46
+ "file_type" => (file.file_type if file.respond_to?(:file_type)),
47
+ "name" => (file.name if file.respond_to?(:name)),
48
+ "display_name" => (file.display_name if file.respond_to?(:display_name))
49
+ }.compact
50
+ end
51
+ end
52
+ end
data/lib/llm/context.rb CHANGED
@@ -34,7 +34,9 @@ module LLM
34
34
  # ctx.talk(prompt)
35
35
  # ctx.messages.each { |m| puts "[#{m.role}] #{m.content}" }
36
36
  class Context
37
+ require_relative "context/serializer"
37
38
  require_relative "context/deserializer"
39
+ include Serializer
38
40
  include Deserializer
39
41
 
40
42
  ##
@@ -279,7 +281,7 @@ module LLM
279
281
  ##
280
282
  # @return [Hash]
281
283
  def to_h
282
- {schema_version: 1, model:, messages:}
284
+ {schema_version: 1, model:, messages: @messages.map { serialize_message(_1) }}
283
285
  end
284
286
 
285
287
  ##
@@ -299,36 +301,10 @@ module LLM
299
301
  # Might raise a number of SystemCallError subclasses
300
302
  # @return [void]
301
303
  def serialize(path:)
302
- ::File.binwrite path, LLM.json.dump(self)
304
+ ::File.binwrite path, LLM.json.dump(to_h)
303
305
  end
304
306
  alias_method :save, :serialize
305
307
 
306
- ##
307
- # Restore a saved context state
308
- # @param [String, nil] path
309
- # The path to a JSON file
310
- # @param [String, nil] string
311
- # A raw JSON string
312
- # @param [Hash, nil] data
313
- # A parsed context payload
314
- # @raise [SystemCallError]
315
- # Might raise a number of SystemCallError subclasses
316
- # @return [LLM::Context]
317
- def deserialize(path: nil, string: nil, data: nil)
318
- ctx = if data
319
- data
320
- elsif path.nil? and string.nil?
321
- raise ArgumentError, "a path, string, or data payload is required"
322
- elsif path
323
- LLM.json.load(::File.binread(path))
324
- else
325
- LLM.json.load(string)
326
- end
327
- @messages.concat [*ctx["messages"]].map { deserialize_message(_1) }
328
- self
329
- end
330
- alias_method :restore, :deserialize
331
-
332
308
  ##
333
309
  # @return [LLM::Cost]
334
310
  # Returns an _approximate_ cost for a given context
data/lib/llm/message.rb CHANGED
@@ -74,6 +74,36 @@ module LLM
74
74
  extra.reasoning_content
75
75
  end
76
76
 
77
+ ##
78
+ # Returns true when a message contains an image URL
79
+ # @return [Boolean]
80
+ def image_url?
81
+ image_urls.any?
82
+ end
83
+
84
+ ##
85
+ # Returns image URL content items from the message
86
+ # @return [Array<LLM::Object>]
87
+ def image_urls
88
+ content_items.select { LLM::Object === _1 && _1.kind == :image_url }
89
+ end
90
+
91
+ ##
92
+ # Returns true when a message contains a local or remote file
93
+ # @return [Boolean]
94
+ def file?
95
+ files.any?
96
+ end
97
+
98
+ ##
99
+ # Returns local and remote file content items from the message
100
+ # @return [Array<LLM::Object>]
101
+ def files
102
+ content_items.select do
103
+ LLM::Object === _1 && [:local_file, :remote_file].include?(_1.kind)
104
+ end
105
+ end
106
+
77
107
  ##
78
108
  # @return [Array<LLM::Function>]
79
109
  def functions
@@ -178,5 +208,9 @@ module LLM
178
208
  tools = extra.tools || response&.__tools__ || []
179
209
  tools.map { _1.respond_to?(:function) ? _1.function : _1 }
180
210
  end
211
+
212
+ def content_items
213
+ Array(content)
214
+ end
181
215
  end
182
216
  end
@@ -224,8 +224,6 @@ module LLM::Sequel
224
224
  ctx.tracer
225
225
  end
226
226
 
227
- private
228
-
229
227
  ##
230
228
  # Returns the resolved provider instance for this record.
231
229
  # @return [LLM::Provider]
@@ -239,6 +237,8 @@ module LLM::Sequel
239
237
  @llm
240
238
  end
241
239
 
240
+ private
241
+
242
242
  ##
243
243
  # @return [LLM::Context]
244
244
  def ctx
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "4.18.0"
4
+ VERSION = "4.20.0"
5
5
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm.rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.18.0
4
+ version: 4.20.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri
@@ -231,6 +231,7 @@ files:
231
231
  - lib/llm/buffer.rb
232
232
  - lib/llm/context.rb
233
233
  - lib/llm/context/deserializer.rb
234
+ - lib/llm/context/serializer.rb
234
235
  - lib/llm/contract.rb
235
236
  - lib/llm/contract/completion.rb
236
237
  - lib/llm/cost.rb