llm.rb 4.9.0 → 4.10.0

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e876d90bb27d23cb36f97ee0edbc1fe8079a09f473b3a38a31fbd0247f3b9035
4
- data.tar.gz: 82fce429783bcc2b8cb3bf8efc4b705ecc4e793cd6b0301ec616b48ec2d68f97
3
+ metadata.gz: 7fc9e03f0a1b44775f414d310b202dcc68c1a8ebbb4bd7e6e0517902551ffbdf
4
+ data.tar.gz: 40a11ffd6d8f91ec0babbfb442174e68dac38fc2183922ff5994ff33670c572c
5
5
  SHA512:
6
- metadata.gz: e2164d4b134ad12316e1ffa1bdf0d73bc52e14c2514e48bfd6a4963dcb9b3f1bcec932728627a49fec1e4a941e576784d48ae50c59ee174eaa8191dda54123c3
7
- data.tar.gz: 02bf971e89ee97485f83b87a44eef3cdfec0dc4eb5fc7aad3f1383dce34ef3f889a89ae77738933945248c8ea7d76062270cba2fad2d734eba77ea105abffbe4
6
+ metadata.gz: ac72f357d340917b99462f12f3a18d30039b3c4f34e8cbb1686da84f3c75b0d67919fc2d90732268301fa45a25d9e9d721d5e1f33a46a387196349878e384dd0
7
+ data.tar.gz: 92d7f611de8229d7f5cbe065169f0688940753eb1bffcc0ef907fd1d66ec18be840848b692aca81d24b1dfc6068011b644ed9be890e863ed8fa5d51194f089ed
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  <p align="center">
5
5
  <a href="https://0x1eef.github.io/x/llm.rb?rebuild=1"><img src="https://img.shields.io/badge/docs-0x1eef.github.io-blue.svg" alt="RubyDoc"></a>
6
6
  <a href="https://opensource.org/license/0bsd"><img src="https://img.shields.io/badge/License-0BSD-orange.svg?" alt="License"></a>
7
- <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.9.0-green.svg?" alt="Version"></a>
7
+ <a href="https://github.com/llmrb/llm.rb/tags"><img src="https://img.shields.io/badge/version-4.10.0-green.svg?" alt="Version"></a>
8
8
  </p>
9
9
 
10
10
  ## About
@@ -139,6 +139,30 @@ ensure
139
139
  end
140
140
  ```
141
141
 
142
+ You can also connect to a hosted MCP server over HTTP. This is useful when the
143
+ server already runs remotely and exposes MCP through a URL instead of a local
144
+ process:
145
+
146
+ ```ruby
147
+ #!/usr/bin/env ruby
148
+ require "llm"
149
+
150
+ llm = LLM.openai(key: ENV["KEY"])
151
+ mcp = LLM.mcp(http: {
152
+ url: "https://api.githubcopilot.com/mcp/",
153
+ headers: {"Authorization" => "Bearer #{ENV.fetch("GITHUB_PAT")}"}
154
+ })
155
+
156
+ begin
157
+ mcp.start
158
+ ctx = LLM::Context.new(llm, stream: $stdout, tools: mcp.tools)
159
+ ctx.talk("List the available GitHub MCP toolsets.")
160
+ ctx.talk(ctx.functions.call) while ctx.functions.any?
161
+ ensure
162
+ mcp.stop
163
+ end
164
+ ```
165
+
142
166
  #### Streaming Chat
143
167
 
144
168
  This example demonstrates llm.rb's streaming support. The `stream: $stdout`
@@ -209,6 +233,7 @@ require "pp"
209
233
  class Report < LLM::Schema
210
234
  property :category, Enum["performance", "security", "outage"], "Report category", required: true
211
235
  property :summary, String, "Short summary", required: true
236
+ property :impact, OneOf[String, Integer], "Primary impact, as text or a count", required: true
212
237
  property :services, Array[String], "Impacted services", required: true
213
238
  property :timestamp, String, "When it happened", optional: true
214
239
  end
@@ -221,6 +246,7 @@ pp res.content!
221
246
  # {
222
247
  # "category" => "performance",
223
248
  # "summary" => "Database latency spiked, causing 5% request timeouts for 12 minutes.",
249
+ # "impact" => "5% request timeouts",
224
250
  # "services" => ["Database"],
225
251
  # "timestamp" => "2024-06-05T10:42:00Z"
226
252
  # }
@@ -0,0 +1,66 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::MCP::Transport
4
+ ##
5
+ # The {LLM::MCP::Transport::HTTP::EventHandler LLM::MCP::Transport::HTTP::EventHandler}
6
+ # class adapts generic server-sent event callbacks into decoded JSON-RPC
7
+ # messages for {LLM::MCP::Transport::HTTP LLM::MCP::Transport::HTTP}.
8
+ # It accumulates event data until a blank line terminates the current
9
+ # event, then parses the payload as JSON and yields it to the callback
10
+ # given at initialization.
11
+ # @private
12
+ class HTTP::EventHandler
13
+ ##
14
+ # @yieldparam [Hash] message
15
+ # A decoded JSON-RPC message
16
+ # @return [LLM::MCP::Transport::HTTP::EventHandler]
17
+ def initialize(&on_message)
18
+ @on_message = on_message
19
+ reset
20
+ end
21
+
22
+ ##
23
+ # Receives the SSE event name.
24
+ # @param [LLM::EventStream::Event] event
25
+ # The event stream event
26
+ # @return [void]
27
+ def on_event(event)
28
+ @event = event.value
29
+ end
30
+
31
+ ##
32
+ # Receives one line of SSE data.
33
+ # @param [LLM::EventStream::Event] event
34
+ # The event stream event
35
+ # @return [void]
36
+ def on_data(event)
37
+ @data << event.value.to_s
38
+ end
39
+
40
+ # The generic event stream parser dispatches one line at a time.
41
+ # A blank line terminates the current SSE event.
42
+ # @param [LLM::EventStream::Event] event
43
+ # The event stream event
44
+ # @return [void]
45
+ def on_chunk(event)
46
+ flush if event.chunk == "\n"
47
+ end
48
+
49
+ private
50
+
51
+ def flush
52
+ return reset if @data.empty? && @event.nil?
53
+ payload = @data.join("\n")
54
+ reset
55
+ return if payload.empty? || payload == "[DONE]"
56
+ @on_message.call(LLM.json.load(payload))
57
+ rescue *LLM.json.parser_error
58
+ reset
59
+ end
60
+
61
+ def reset
62
+ @event = nil
63
+ @data = []
64
+ end
65
+ end
66
+ end
@@ -0,0 +1,122 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::MCP::Transport
4
+ ##
5
+ # The {LLM::MCP::Transport::HTTP LLM::MCP::Transport::HTTP} class
6
+ # provides an HTTP transport for {LLM::MCP LLM::MCP}. It sends
7
+ # JSON-RPC messages with HTTP POST requests and buffers response
8
+ # messages for non-blocking reads.
9
+ class HTTP
10
+ require_relative "http/event_handler"
11
+
12
+ ##
13
+ # @param [String] url
14
+ # The URL for the MCP HTTP endpoint
15
+ # @param [Hash] headers
16
+ # Extra headers to send with requests
17
+ # @param [Integer, nil] timeout
18
+ # The timeout in seconds. Defaults to nil
19
+ # @return [LLM::MCP::Transport::HTTP]
20
+ def initialize(url:, headers: {}, timeout: nil)
21
+ @uri = URI.parse(url)
22
+ @use_ssl = @uri.scheme == "https"
23
+ @headers = headers
24
+ @timeout = timeout
25
+ @queue = []
26
+ @monitor = Monitor.new
27
+ @running = false
28
+ end
29
+
30
+ ##
31
+ # Starts the HTTP transport.
32
+ # @raise [LLM::MCP::Error]
33
+ # When the transport is already running
34
+ # @return [void]
35
+ def start
36
+ lock do
37
+ raise LLM::MCP::Error, "MCP transport is already running" if running?
38
+ @queue.clear
39
+ @running = true
40
+ end
41
+ end
42
+
43
+ ##
44
+ # Stops the HTTP transport and closes the connection.
45
+ # This method is idempotent.
46
+ # @return [void]
47
+ def stop
48
+ lock do
49
+ return nil unless running?
50
+ @running = false
51
+ nil
52
+ end
53
+ end
54
+
55
+ ##
56
+ # Writes a JSON-RPC message via HTTP POST.
57
+ # @param [Hash] message
58
+ # The JSON-RPC message
59
+ # @raise [LLM::MCP::Error]
60
+ # When the transport is not running or the HTTP request fails
61
+ # @return [void]
62
+ def write(message)
63
+ raise LLM::MCP::Error, "MCP transport is not running" unless running?
64
+ http = Net::HTTP.start(uri.host, uri.port, use_ssl:, open_timeout: timeout, read_timeout: timeout)
65
+ req = Net::HTTP::Post.new(uri.path, headers.merge("content-type" => "application/json"))
66
+ req.body = LLM.json.dump(message)
67
+ http.request(req) do |res|
68
+ unless Net::HTTPSuccess === res
69
+ raise LLM::MCP::Error, "MCP transport write failed with HTTP #{res.code}"
70
+ end
71
+ read(res)
72
+ end
73
+ end
74
+
75
+ ##
76
+ # Reads the next queued message without blocking.
77
+ # @raise [LLM::MCP::Error]
78
+ # When the transport is not running
79
+ # @raise [IO::WaitReadable]
80
+ # When no complete message is available to read
81
+ # @return [Hash]
82
+ def read_nonblock
83
+ lock do
84
+ raise LLM::MCP::Error, "MCP transport is not running" unless running?
85
+ raise IO::WaitReadable if @queue.empty?
86
+ @queue.shift
87
+ end
88
+ end
89
+
90
+ ##
91
+ # @return [Boolean]
92
+ # Returns true when the MCP server connection is alive
93
+ def running?
94
+ @running
95
+ end
96
+
97
+ private
98
+
99
+ attr_reader :uri, :use_ssl, :headers, :timeout
100
+
101
+ def enqueue(message)
102
+ lock { @queue << message }
103
+ end
104
+
105
+ def read(res)
106
+ if res["content-type"].to_s.include?("text/event-stream")
107
+ parser = LLM::EventStream::Parser.new
108
+ parser.register EventHandler.new { enqueue(_1) }
109
+ res.read_body { parser << _1 }
110
+ parser.free
111
+ else
112
+ body = +""
113
+ res.read_body { body << _1 }
114
+ enqueue(LLM.json.load(body)) unless body.empty?
115
+ end
116
+ end
117
+
118
+ def lock(&)
119
+ @monitor.synchronize(&)
120
+ end
121
+ end
122
+ end
data/lib/llm/mcp.rb CHANGED
@@ -6,8 +6,8 @@
6
6
  # clients and servers to exchange capabilities such as tools, prompts,
7
7
  # resources, and other structured interactions.
8
8
  #
9
- # In llm.rb, {LLM::MCP LLM::MCP} currently supports stdio servers and
10
- # focuses on discovering tools that can be used through
9
+ # In llm.rb, {LLM::MCP LLM::MCP} currently supports stdio and HTTP
10
+ # transports and focuses on discovering tools that can be used through
11
11
  # {LLM::Context LLM::Context} and {LLM::Agent LLM::Agent}.
12
12
  class LLM::MCP
13
13
  require "monitor"
@@ -15,6 +15,7 @@ class LLM::MCP
15
15
  require_relative "mcp/command"
16
16
  require_relative "mcp/rpc"
17
17
  require_relative "mcp/pipe"
18
+ require_relative "mcp/transport/http"
18
19
  require_relative "mcp/transport/stdio"
19
20
 
20
21
  include RPC
@@ -22,21 +23,34 @@ class LLM::MCP
22
23
  ##
23
24
  # @param [LLM::Provider, nil] llm
24
25
  # The provider to use for MCP transports that need one
25
- # @param [Hash] stdio The configuration for the stdio transport
26
+ # @param [Hash, nil] stdio The configuration for the stdio transport
26
27
  # @option stdio [Array<String>] :argv
27
28
  # The command to run for the MCP process
28
29
  # @option stdio [Hash] :env
29
30
  # The environment variables to set for the MCP process
30
31
  # @option stdio [String, nil] :cwd
31
32
  # The working directory for the MCP process
33
+ # @param [Hash, nil] http The configuration for the HTTP transport
34
+ # @option http [String] :url
35
+ # The URL for the MCP HTTP endpoint
36
+ # @option http [Hash] :headers
37
+ # Extra headers for requests
32
38
  # @param [Integer] timeout The maximum amount of time to wait when reading from an MCP process
33
39
  # @return [LLM::MCP] A new MCP instance
34
- def initialize(llm = nil, stdio:, timeout: 30)
40
+ def initialize(llm = nil, stdio: nil, http: nil, timeout: 30)
35
41
  @llm = llm
36
- @command = Command.new(**stdio)
37
42
  @monitor = Monitor.new
38
- @transport = Transport::Stdio.new(command:)
39
43
  @timeout = timeout
44
+ if stdio && http
45
+ raise ArgumentError, "stdio and http are mutually exclusive"
46
+ elsif stdio
47
+ @command = Command.new(**stdio)
48
+ @transport = Transport::Stdio.new(command:)
49
+ elsif http
50
+ @transport = Transport::HTTP.new(**http, timeout:)
51
+ else
52
+ raise ArgumentError, "stdio or http is required"
53
+ end
40
54
  end
41
55
 
42
56
  ##
@@ -0,0 +1,31 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Schema
4
+ ##
5
+ # The {LLM::Schema::AllOf LLM::Schema::AllOf} class represents an
6
+ # allOf union in a JSON schema. It is a subclass of
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf}.
8
+ class AllOf < Leaf
9
+ ##
10
+ # Returns an allOf union for the given types.
11
+ # @return [LLM::Schema::AllOf]
12
+ def self.[](*types)
13
+ schema = LLM::Schema.new
14
+ new(types.map { LLM::Schema::Utils.resolve(schema, _1) })
15
+ end
16
+
17
+ ##
18
+ # @param [Array<LLM::Schema::Leaf>] values
19
+ # The values required by the union
20
+ # @return [LLM::Schema::AllOf]
21
+ def initialize(values)
22
+ @values = values
23
+ end
24
+
25
+ ##
26
+ # @return [Hash]
27
+ def to_h
28
+ super.merge!(allOf: @values)
29
+ end
30
+ end
31
+ end
@@ -0,0 +1,31 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Schema
4
+ ##
5
+ # The {LLM::Schema::AnyOf LLM::Schema::AnyOf} class represents an
6
+ # anyOf union in a JSON schema. It is a subclass of
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf}.
8
+ class AnyOf < Leaf
9
+ ##
10
+ # Returns an anyOf union for the given types.
11
+ # @return [LLM::Schema::AnyOf]
12
+ def self.[](*types)
13
+ schema = LLM::Schema.new
14
+ new(types.map { LLM::Schema::Utils.resolve(schema, _1) })
15
+ end
16
+
17
+ ##
18
+ # @param [Array<LLM::Schema::Leaf>] values
19
+ # The values allowed by the union
20
+ # @return [LLM::Schema::AnyOf]
21
+ def initialize(values)
22
+ @values = values
23
+ end
24
+
25
+ ##
26
+ # @return [Hash]
27
+ def to_h
28
+ super.merge!(anyOf: @values)
29
+ end
30
+ end
31
+ end
@@ -0,0 +1,31 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Schema
4
+ ##
5
+ # The {LLM::Schema::OneOf LLM::Schema::OneOf} class represents an
6
+ # oneOf union in a JSON schema. It is a subclass of
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf}.
8
+ class OneOf < Leaf
9
+ ##
10
+ # Returns a oneOf union for the given types.
11
+ # @return [LLM::Schema::OneOf]
12
+ def self.[](*types)
13
+ schema = LLM::Schema.new
14
+ new(types.map { LLM::Schema::Utils.resolve(schema, _1) })
15
+ end
16
+
17
+ ##
18
+ # @param [Array<LLM::Schema::Leaf>] values
19
+ # The values allowed by the union
20
+ # @return [LLM::Schema::OneOf]
21
+ def initialize(values)
22
+ @values = values
23
+ end
24
+
25
+ ##
26
+ # @return [Hash]
27
+ def to_h
28
+ super.merge!(oneOf: @values)
29
+ end
30
+ end
31
+ end
@@ -8,6 +8,8 @@ class LLM::Schema
8
8
  # external JSON schema definitions into the schema objects used
9
9
  # throughout llm.rb.
10
10
  module Parser
11
+ METADATA_KEYS = %w[description default enum const].freeze
12
+
11
13
  ##
12
14
  # Parses a JSON schema into an {LLM::Schema::Leaf}.
13
15
  # @param [Hash] schema
@@ -27,6 +29,8 @@ class LLM::Schema
27
29
  when "number" then apply(parse_number(schema), schema)
28
30
  when "boolean" then apply(schema().boolean, schema)
29
31
  when "null" then apply(schema().null, schema)
32
+ when ::Array then apply(schema().any_of(*schema["type"].map { parse(schema.except("type", *METADATA_KEYS).merge("type" => _1), root) }), schema.except("type"))
33
+ when nil then parse_union(schema, root)
30
34
  else raise TypeError, "unsupported schema type #{schema["type"].inspect}"
31
35
  end
32
36
  end
@@ -50,6 +54,14 @@ class LLM::Schema
50
54
  schema().array(items)
51
55
  end
52
56
 
57
+ def parse_union(schema, root)
58
+ return apply(schema().any_of(*schema["anyOf"].map { parse(_1, root) }), schema) if schema.key?("anyOf")
59
+ return apply(schema().one_of(*schema["oneOf"].map { parse(_1, root) }), schema) if schema.key?("oneOf")
60
+ return apply(schema().all_of(*schema["allOf"].map { parse(_1, root) }), schema) if schema.key?("allOf")
61
+ return parse(infer_type(schema), root) if infer_type(schema)
62
+ raise TypeError, "unsupported schema type #{schema["type"].inspect}"
63
+ end
64
+
53
65
  def parse_string(schema)
54
66
  leaf = schema().string
55
67
  leaf.min(schema["minLength"]) if schema.key?("minLength")
@@ -105,5 +117,29 @@ class LLM::Schema
105
117
  rescue KeyError
106
118
  raise TypeError, "unresolvable schema ref #{ref.inspect}"
107
119
  end
120
+
121
+ def infer_type(schema)
122
+ if schema.key?("const")
123
+ schema.merge("type" => type_of(schema["const"]))
124
+ elsif schema.key?("enum")
125
+ type = type_of(schema["enum"].first)
126
+ return unless type && schema["enum"].all? { type_of(_1) == type }
127
+ schema.merge("type" => type)
128
+ elsif schema.key?("default")
129
+ schema.merge("type" => type_of(schema["default"]))
130
+ end
131
+ end
132
+
133
+ def type_of(value)
134
+ case value
135
+ when ::Hash then "object"
136
+ when ::Array then "array"
137
+ when ::String then "string"
138
+ when ::Integer then "integer"
139
+ when ::Float then "number"
140
+ when ::TrueClass, ::FalseClass then "boolean"
141
+ when ::NilClass then "null"
142
+ end
143
+ end
108
144
  end
109
145
  end
data/lib/llm/schema.rb CHANGED
@@ -35,6 +35,9 @@ class LLM::Schema
35
35
  require_relative "schema/leaf"
36
36
  require_relative "schema/object"
37
37
  require_relative "schema/array"
38
+ require_relative "schema/all_of"
39
+ require_relative "schema/any_of"
40
+ require_relative "schema/one_of"
38
41
  require_relative "schema/string"
39
42
  require_relative "schema/enum"
40
43
  require_relative "schema/number"
@@ -45,6 +48,23 @@ class LLM::Schema
45
48
  @__monitor = Monitor.new
46
49
  extend LLM::Schema::Parser
47
50
 
51
+ ##
52
+ # @api private
53
+ module Utils
54
+ extend self
55
+
56
+ def resolve(schema, type)
57
+ if LLM::Schema::Leaf === type
58
+ type
59
+ elsif Class === type && type.respond_to?(:object)
60
+ type.object
61
+ else
62
+ target = type.name.split("::").last.downcase
63
+ schema.public_send(target)
64
+ end
65
+ end
66
+ end
67
+
48
68
  ##
49
69
  # Configures a monitor for a subclass
50
70
  # @return [void]
@@ -65,14 +85,7 @@ class LLM::Schema
65
85
  # A hash of options
66
86
  def self.property(name, type, description, options = {})
67
87
  lock do
68
- if LLM::Schema::Leaf === type
69
- prop = type
70
- elsif Class === type && type.respond_to?(:object)
71
- prop = type.object
72
- else
73
- target = type.name.split("::").last.downcase
74
- prop = schema.public_send(target)
75
- end
88
+ prop = Utils.resolve(schema, type)
76
89
  options = {description:}.merge(options)
77
90
  options.each { (_2 == true) ? prop.public_send(_1) : prop.public_send(_1, *_2) }
78
91
  object[name] = prop
@@ -120,6 +133,30 @@ class LLM::Schema
120
133
  Array.new(*items)
121
134
  end
122
135
 
136
+ ##
137
+ # Returns an anyOf union
138
+ # @param [Array<LLM::Schema::Leaf>] values The union values
139
+ # @return [LLM::Schema::AnyOf]
140
+ def any_of(*values)
141
+ AnyOf.new(values)
142
+ end
143
+
144
+ ##
145
+ # Returns an allOf union
146
+ # @param [Array<LLM::Schema::Leaf>] values The union values
147
+ # @return [LLM::Schema::AllOf]
148
+ def all_of(*values)
149
+ AllOf.new(values)
150
+ end
151
+
152
+ ##
153
+ # Returns a oneOf union
154
+ # @param [Array<LLM::Schema::Leaf>] values The union values
155
+ # @return [LLM::Schema::OneOf]
156
+ def one_of(*values)
157
+ OneOf.new(values)
158
+ end
159
+
123
160
  ##
124
161
  # Returns a string
125
162
  # @return [LLM::Schema::String]
data/lib/llm/tool.rb CHANGED
@@ -22,9 +22,10 @@ class LLM::Tool
22
22
  extend LLM::Tool::Param
23
23
 
24
24
  types = [
25
- :Leaf, :String, :Enum, :Array,
25
+ :Leaf, :String, :Enum,
26
+ :AllOf, :AnyOf, :OneOf,
26
27
  :Object, :Integer, :Number,
27
- :Boolean, :Null
28
+ :Array, :Boolean, :Null
28
29
  ]
29
30
  types.each do |constant|
30
31
  const_set constant, LLM::Schema.const_get(constant)
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "4.9.0"
4
+ VERSION = "4.10.0"
5
5
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm.rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.9.0
4
+ version: 4.10.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri
@@ -271,6 +271,8 @@ files:
271
271
  - lib/llm/mcp/error.rb
272
272
  - lib/llm/mcp/pipe.rb
273
273
  - lib/llm/mcp/rpc.rb
274
+ - lib/llm/mcp/transport/http.rb
275
+ - lib/llm/mcp/transport/http/event_handler.rb
274
276
  - lib/llm/mcp/transport/stdio.rb
275
277
  - lib/llm/message.rb
276
278
  - lib/llm/mime.rb
@@ -358,6 +360,8 @@ files:
358
360
  - lib/llm/registry.rb
359
361
  - lib/llm/response.rb
360
362
  - lib/llm/schema.rb
363
+ - lib/llm/schema/all_of.rb
364
+ - lib/llm/schema/any_of.rb
361
365
  - lib/llm/schema/array.rb
362
366
  - lib/llm/schema/boolean.rb
363
367
  - lib/llm/schema/enum.rb
@@ -366,6 +370,7 @@ files:
366
370
  - lib/llm/schema/null.rb
367
371
  - lib/llm/schema/number.rb
368
372
  - lib/llm/schema/object.rb
373
+ - lib/llm/schema/one_of.rb
369
374
  - lib/llm/schema/parser.rb
370
375
  - lib/llm/schema/string.rb
371
376
  - lib/llm/schema/version.rb