llm.rb 4.2.0 → 4.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 567ae33357581ae1602a337ee53dbc86328b4b6c3ee5af5f0b86cf810c039e64
4
- data.tar.gz: 4a3d332aad0a2f824966a850c6be36e48894a871d1831f70527e46df5614b207
3
+ metadata.gz: f35a8e2b21d2bf7914c9b261897eaa6d9c6d0ff03dc51c0e751f86d22e08f093
4
+ data.tar.gz: 92ffab3eab12fd6393ef8e324360d13e9725403a9571cf6c28427be090021965
5
5
  SHA512:
6
- metadata.gz: 6c04ea5dcf20e9757b0bac1d0eb4cc27176fe966f4ad6fd0f5f06b708d8653522be25b454c9e831f2a5d30fa7676f04d289692fb94ae3bd06f98ab0576ccf7f3
7
- data.tar.gz: 2f0d46d75e75382a601863fc0c10e4218efb3029c0650d021586007895d6647fd006eeffd7c6e7027204fcb75168be0e4f51b974d08df95397f1a233baa4239b
6
+ metadata.gz: 02cdeb6b969b4ec2d76ffce29236fb8c189cdf3a71ea121e2b218ffb8fdadff0005cfd4211bcdac6e17b9814f9533949f0bdec19efdd8fb2262096d5bd440dde
7
+ data.tar.gz: 3cf0abeefd9927df9e600a694387e0f7d334f9c614b6e64cd54cf3e5f82b6a73895d74c392a5a6f68a92c529155cfb3db067bf8b2ef85e0df4505105c759870c
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  <p align="center">
5
5
  <a href="https://0x1eef.github.io/x/llm.rb?rebuild=1"><img src="https://img.shields.io/badge/docs-0x1eef.github.io-blue.svg" alt="RubyDoc"></a>
6
6
  <a href="https://opensource.org/license/0bsd"><img src="https://img.shields.io/badge/License-0BSD-orange.svg?" alt="License"></a>
7
- <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.2.0-green.svg?" alt="Version"></a>
7
+ <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.3.0-green.svg?" alt="Version"></a>
8
8
  </p>
9
9
 
10
10
  ## About
@@ -39,7 +39,7 @@ llm = LLM.openai(key: ENV["KEY"])
39
39
  ses = LLM::Session.new(llm, stream: $stdout)
40
40
  loop do
41
41
  print "> "
42
- ses.talk(STDIN.gets)
42
+ ses.talk(STDIN.gets || break)
43
43
  puts
44
44
  end
45
45
  ```
@@ -193,6 +193,7 @@ ses.talk(prompt)
193
193
 
194
194
  #### Chat, Agents
195
195
  - 🧠 Stateless + stateful chat (completions + responses)
196
+ - 💾 Save and restore sessions across processes
196
197
  - 🤖 Tool calling / function execution
197
198
  - 🔁 Agent tool-call auto-execution (bounded)
198
199
  - 🗂️ JSON Schema structured output
@@ -383,6 +384,60 @@ ses.talk "Hello world!"
383
384
  ses.talk "Adios."
384
385
  ```
385
386
 
387
+ #### Serialization
388
+
389
+ [LLM::Session](https://0x1eef.github.io/x/llm.rb/LLM/Session.html) can be
390
+ serialized and deserialized across process boundaries and persisted to
391
+ storage such as files, a `jsonb` column (PostgreSQL), or other backends
392
+ through a JSON representation of the history encapsulated by
393
+ [LLM::Session](https://0x1eef.github.io/x/llm.rb/LLM/Session.html)
394
+ &ndash; inclusive of tool metadata as well:
395
+
396
+ * Process 1
397
+ ```ruby
398
+ #!/usr/bin/env ruby
399
+ require "llm"
400
+
401
+ llm = LLM.openai(key: ENV["KEY"])
402
+ ses = LLM::Session.new(llm)
403
+ ses.talk "Howdy partner"
404
+ ses.talk "I'll see you later"
405
+ ses.save(path: "session.json")
406
+ ```
407
+ * Process 2
408
+ ```ruby
409
+ #!/usr/bin/env ruby
410
+ require "llm"
411
+ require "pp"
412
+
413
+ llm = LLM.openai(key: ENV["KEY"])
414
+ ses = LLM::Session.new(llm)
415
+ ses.restore(path: "session.json")
416
+ ses.talk "Howdy partner. I'm back"
417
+ pp ses.messages
418
+ ```
419
+
420
+ But how does it work without a file? The [LLM::Session](https://0x1eef.github.io/x/llm.rb/LLM/Session.html)
421
+ class implements `#to_json` and it can be used to obtain a JSON representation
422
+ of a session that can be stored in a `jsonb` column in PostgreSQL, or any
423
+ other storage backend. The session can then be restored from the JSON
424
+ representation via the restore method and its `string` argument:
425
+
426
+ ```ruby
427
+ #!/usr/bin/env ruby
428
+ require "llm"
429
+
430
+ llm = LLM.openai(key: ENV["KEY"])
431
+ ses1 = LLM::Session.new(llm)
432
+ ses1.talk "Howdy partner"
433
+ ses1.talk "I'll see you later"
434
+
435
+ json = ses1.to_json
436
+ ses2 = LLM::Session.new(llm)
437
+ ses2.restore(string: json)
438
+ ses2.talk "Howdy partner. I'm back"
439
+ ```
440
+
386
441
  #### Thread Safety
387
442
 
388
443
  The llm.rb library is thread-safe and can be used in a multi-threaded
@@ -719,6 +774,12 @@ llm.rb can be installed via rubygems.org:
719
774
 
720
775
  gem install llm.rb
721
776
 
777
+ ## Sources
778
+
779
+ * [GitHub.com](https://github.com/llmrb/llm.rb)
780
+ * [GitLab.com](https://gitlab.com/llmrb/llm.rb)
781
+ * [Codeberg.org](https://codeberg.org/llmrb/llm.rb)
782
+
722
783
  ## License
723
784
 
724
785
  [BSD Zero Clause](https://choosealicense.com/licenses/0bsd/)
data/lib/llm/agent.rb CHANGED
@@ -204,6 +204,22 @@ module LLM
204
204
  @ses.model
205
205
  end
206
206
 
207
+ ##
208
+ # @param (see LLM::Session#serialize)
209
+ # @return (see LLM::Session#serialize)
210
+ def serialize(**kw)
211
+ @ses.serialize(**kw)
212
+ end
213
+ alias_method :save, :serialize
214
+
215
+ ##
216
+ # @param (see LLM::Session#deserialize)
217
+ # @return (see LLM::Session#deserialize)
218
+ def deserialize(**kw)
219
+ @ses.deserialize(**kw)
220
+ end
221
+ alias_method :restore, :deserialize
222
+
207
223
  private
208
224
 
209
225
  def apply_instructions(prompt)
data/lib/llm/bot.rb CHANGED
@@ -23,6 +23,9 @@ module LLM
23
23
  # ses.talk(prompt)
24
24
  # ses.messages.each { |m| puts "[#{m.role}] #{m.content}" }
25
25
  class Session
26
+ require_relative "session/deserializer"
27
+ include Deserializer
28
+
26
29
  ##
27
30
  # Returns an Enumerable for the messages in a conversation
28
31
  # @return [LLM::Buffer<LLM::Message>]
@@ -60,7 +63,9 @@ module LLM
60
63
  params = params.merge(messages: [*@messages.to_a, *messages])
61
64
  params = @params.merge(params)
62
65
  res = @provider.complete(prompt, params)
63
- @messages.concat [LLM::Message.new(params[:role] || :user, prompt)]
66
+ role = params[:role] || @provider.user_role
67
+ role = @provider.tool_role if params[:role].nil? && [*prompt].grep(LLM::Function::Return).any?
68
+ @messages.concat [LLM::Message.new(role, prompt)]
64
69
  @messages.concat messages
65
70
  @messages.concat [res.choices[-1]]
66
71
  res
@@ -86,7 +91,8 @@ module LLM
86
91
  params = params.merge(previous_response_id: res_id, input: messages).compact
87
92
  params = @params.merge(params)
88
93
  res = @provider.responses.create(prompt, params)
89
- @messages.concat [LLM::Message.new(params[:role] || :user, prompt)]
94
+ role = params[:role] || @provider.user_role
95
+ @messages.concat [LLM::Message.new(role, prompt)]
90
96
  @messages.concat messages
91
97
  @messages.concat [res.choices[-1]]
92
98
  res
@@ -190,6 +196,56 @@ module LLM
190
196
  messages.find(&:assistant?)&.model || @params[:model]
191
197
  end
192
198
 
199
+ ##
200
+ # @return [Hash]
201
+ def to_h
202
+ {model:, messages:}
203
+ end
204
+
205
+ ##
206
+ # @return [String]
207
+ def to_json(...)
208
+ {schema_version: 1}.merge!(to_h).to_json(...)
209
+ end
210
+
211
+ ##
212
+ # Save a session
213
+ # @example
214
+ # llm = LLM.openai(key: ENV["KEY"])
215
+ # ses = LLM::Session.new(llm)
216
+ # ses.talk "Hello"
217
+ # ses.save(path: "session.json")
218
+ # @raise [SystemCallError]
219
+ # Might raise a number of SystemCallError subclasses
220
+ # @return [void]
221
+ def serialize(path:)
222
+ ::File.binwrite path, LLM.json.dump(self)
223
+ end
224
+ alias_method :save, :serialize
225
+
226
+ ##
227
+ # Restore a session
228
+ # @param [String, nil] path
229
+ # The path to a JSON file
230
+ # @param [String, nil] string
231
+ # A raw JSON string
232
+ # @raise [SystemCallError]
233
+ # Might raise a number of SystemCallError subclasses
234
+ # @return [LLM::Session]
235
+ def deserialize(path: nil, string: nil)
236
+ payload = if path.nil? and string.nil?
237
+ raise ArgumentError, "a path or string is required"
238
+ elsif path
239
+ ::File.binread(path)
240
+ else
241
+ string
242
+ end
243
+ ses = LLM.json.load(payload)
244
+ @messages.concat [*ses["messages"]].map { deserialize_message(_1) }
245
+ self
246
+ end
247
+ alias_method :restore, :deserialize
248
+
193
249
  private
194
250
 
195
251
  def fetch(prompt, params)
@@ -203,4 +259,7 @@ module LLM
203
259
 
204
260
  # Backward-compatible alias
205
261
  Bot = Session
262
+
263
+ # Scheduled for removal in v5.0
264
+ deprecate_constant :Bot
206
265
  end
data/lib/llm/buffer.rb CHANGED
@@ -70,6 +70,12 @@ module LLM
70
70
  @messages[index]
71
71
  end
72
72
 
73
+ ##
74
+ # @return [String]
75
+ def to_json(...)
76
+ LLM.json.dump(@messages, ...)
77
+ end
78
+
73
79
  ##
74
80
  # @return [String]
75
81
  def inspect
data/lib/llm/function.rb CHANGED
@@ -33,6 +33,18 @@ class LLM::Function
33
33
  prepend LLM::Function::Tracing
34
34
 
35
35
  class Return < Struct.new(:id, :name, :value)
36
+ ##
37
+ # Returns a Hash representation of {LLM::Function::Return}
38
+ # @return [Hash]
39
+ def to_h
40
+ {id:, name:, value:}
41
+ end
42
+
43
+ ##
44
+ # @return [String]
45
+ def to_json(...)
46
+ LLM.json.dump(to_h, ...)
47
+ end
36
48
  end
37
49
 
38
50
  ##
@@ -35,16 +35,16 @@ module LLM
35
35
  class JSONAdapter::JSON < JSONAdapter
36
36
  ##
37
37
  # @return (see JSONAdapter#dump)
38
- def self.dump(obj)
38
+ def self.dump(obj, ...)
39
39
  require "json" unless defined?(::JSON)
40
- ::JSON.dump(obj)
40
+ ::JSON.dump(obj, ...)
41
41
  end
42
42
 
43
43
  ##
44
44
  # @return (see JSONAdapter#load)
45
- def self.load(string)
45
+ def self.load(string, ...)
46
46
  require "json" unless defined?(::JSON)
47
- ::JSON.parse(string)
47
+ ::JSON.parse(string, ...)
48
48
  end
49
49
 
50
50
  ##
@@ -61,16 +61,16 @@ module LLM
61
61
  class JSONAdapter::Oj < JSONAdapter
62
62
  ##
63
63
  # @return (see JSONAdapter#dump)
64
- def self.dump(obj)
64
+ def self.dump(obj, options = {})
65
65
  require "oj" unless defined?(::Oj)
66
- ::Oj.dump(obj, mode: :compat)
66
+ ::Oj.dump(obj, options.merge(mode: :compat))
67
67
  end
68
68
 
69
69
  ##
70
70
  # @return (see JSONAdapter#load)
71
- def self.load(string)
71
+ def self.load(string, options = {})
72
72
  require "oj" unless defined?(::Oj)
73
- ::Oj.load(string, mode: :compat, symbol_keys: false, symbolize_names: false)
73
+ ::Oj.load(string, options.merge(mode: :compat, symbol_keys: false, symbolize_names: false))
74
74
  end
75
75
 
76
76
  ##
@@ -87,16 +87,16 @@ module LLM
87
87
  class JSONAdapter::Yajl < JSONAdapter
88
88
  ##
89
89
  # @return (see JSONAdapter#dump)
90
- def self.dump(obj)
90
+ def self.dump(obj, ...)
91
91
  require "yajl" unless defined?(::Yajl)
92
- ::Yajl::Encoder.encode(obj)
92
+ ::Yajl::Encoder.encode(obj, ...)
93
93
  end
94
94
 
95
95
  ##
96
96
  # @return (see JSONAdapter#load)
97
- def self.load(string)
97
+ def self.load(string, ...)
98
98
  require "yajl" unless defined?(::Yajl)
99
- ::Yajl::Parser.parse(string)
99
+ ::Yajl::Parser.parse(string, ...)
100
100
  end
101
101
 
102
102
  ##
data/lib/llm/message.rb CHANGED
@@ -30,10 +30,18 @@ module LLM
30
30
  end
31
31
 
32
32
  ##
33
- # Returns a hash representation of the message
33
+ # Returns a Hash representation of the message.
34
34
  # @return [Hash]
35
35
  def to_h
36
- {role:, content:}
36
+ {role:, content:,
37
+ tools: @extra[:tool_calls],
38
+ original_tool_calls: extra[:original_tool_calls]}.compact
39
+ end
40
+
41
+ ##
42
+ # @return [String]
43
+ def to_json(...)
44
+ LLM.json.dump(to_h, ...)
37
45
  end
38
46
 
39
47
  ##
@@ -62,7 +70,7 @@ module LLM
62
70
  # @return [Array<LLM::Function>]
63
71
  def functions
64
72
  @functions ||= tool_calls.map do |fn|
65
- function = tools.find { _1.name.to_s == fn["name"] }.dup
73
+ function = available_tools.find { _1.name.to_s == fn["name"] }.dup
66
74
  function.tap { _1.id = fn.id }
67
75
  function.tap { _1.arguments = fn.arguments }
68
76
  end
@@ -158,7 +166,7 @@ module LLM
158
166
  @tool_calls ||= LLM::Object.from(@extra[:tool_calls] || [])
159
167
  end
160
168
 
161
- def tools
169
+ def available_tools
162
170
  response&.__tools__ || []
163
171
  end
164
172
  end
data/lib/llm/provider.rb CHANGED
@@ -253,6 +253,12 @@ class LLM::Provider
253
253
  :developer
254
254
  end
255
255
 
256
+ ##
257
+ # @return [Symbol]
258
+ def tool_role
259
+ :tool
260
+ end
261
+
256
262
  ##
257
263
  # @return [LLM::Tracer]
258
264
  # Returns an LLM tracer
@@ -131,7 +131,7 @@ class LLM::Gemini
131
131
  req["X-Goog-Upload-Command"] = "start"
132
132
  req["X-Goog-Upload-Header-Content-Length"] = file.bytesize
133
133
  req["X-Goog-Upload-Header-Content-Type"] = file.mime_type
134
- req.body = LLM.json.dump(file: {display_name: File.basename(file.path)})
134
+ req.body = LLM.json.dump({file: {display_name: File.basename(file.path)}})
135
135
  res, span = execute(request: req, operation: "request")
136
136
  finish_trace(operation: "request", res: LLM::Response.new(res), span:)
137
137
  res["x-goog-upload-url"]
@@ -52,8 +52,8 @@ class LLM::Schema
52
52
 
53
53
  ##
54
54
  # @return [String]
55
- def to_json(options = {})
56
- to_h.to_json(options)
55
+ def to_json(...)
56
+ LLM.json.dump(to_h, ...)
57
57
  end
58
58
 
59
59
  ##
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Session
4
+ ##
5
+ # @api private
6
+ module Deserializer
7
+ ##
8
+ # @param [Hash] payload
9
+ # @return [LLM::Message]
10
+ def deserialize_message(payload)
11
+ tool_calls = deserialize_tool_calls(payload["tools"])
12
+ returns = deserialize_returns(payload["content"]) if returns.nil?
13
+ original_tool_calls = payload["original_tool_calls"]
14
+ extra = {tool_calls:, original_tool_calls:}.compact
15
+ content = returns.nil? ? payload["content"] : returns
16
+ LLM::Message.new(payload["role"], content, extra)
17
+ end
18
+
19
+ private
20
+
21
+ def deserialize_tool_calls(items)
22
+ items ||= []
23
+ items.empty? ? nil : items
24
+ end
25
+
26
+ def deserialize_returns(items)
27
+ returns = [*items].filter_map do |item|
28
+ next unless Hash === item
29
+ id, name, value = item.values_at("id", "name", "value")
30
+ next if id.nil? || name.nil? || value.nil?
31
+ LLM::Function::Return.new(id, name, value)
32
+ end
33
+ returns.empty? ? nil : returns
34
+ end
35
+ end
36
+ end
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "4.2.0"
4
+ VERSION = "4.3.0"
5
5
  end
data/llm.gemspec CHANGED
@@ -16,11 +16,11 @@ Gem::Specification.new do |spec|
16
16
  SUMMARY
17
17
 
18
18
  spec.description = spec.summary
19
- spec.homepage = "https://github.com/llmrb/llm"
20
19
  spec.license = "0BSD"
21
20
  spec.required_ruby_version = ">= 3.2.0"
22
21
 
23
- spec.metadata["homepage_uri"] = spec.homepage
22
+ spec.homepage = "https://github.com/llmrb/llm.rb"
23
+ spec.metadata["homepage_uri"] = "https://github.com/llmrb/llm.rb"
24
24
  spec.metadata["source_code_uri"] = "https://github.com/llmrb/llm.rb"
25
25
  spec.metadata["documentation_uri"] = "https://0x1eef.github.io/x/llm.rb"
26
26
 
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm.rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.2.0
4
+ version: 4.3.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri
@@ -313,6 +313,7 @@ files:
313
313
  - lib/llm/schema/version.rb
314
314
  - lib/llm/server_tool.rb
315
315
  - lib/llm/session.rb
316
+ - lib/llm/session/deserializer.rb
316
317
  - lib/llm/tool.rb
317
318
  - lib/llm/tool/param.rb
318
319
  - lib/llm/tracer.rb
@@ -323,11 +324,11 @@ files:
323
324
  - lib/llm/utils.rb
324
325
  - lib/llm/version.rb
325
326
  - llm.gemspec
326
- homepage: https://github.com/llmrb/llm
327
+ homepage: https://github.com/llmrb/llm.rb
327
328
  licenses:
328
329
  - 0BSD
329
330
  metadata:
330
- homepage_uri: https://github.com/llmrb/llm
331
+ homepage_uri: https://github.com/llmrb/llm.rb
331
332
  source_code_uri: https://github.com/llmrb/llm.rb
332
333
  documentation_uri: https://0x1eef.github.io/x/llm.rb
333
334
  rdoc_options: []