llm.rb 0.11.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/LICENSE +0 -0
- data/README.md +56 -19
- data/lib/llm/bot/builder.rb +0 -0
- data/lib/llm/bot/conversable.rb +12 -4
- data/lib/llm/bot/prompt/completion.rb +18 -0
- data/lib/llm/bot/prompt/respond.rb +9 -0
- data/lib/llm/bot.rb +10 -17
- data/lib/llm/buffer.rb +15 -1
- data/lib/llm/error.rb +4 -0
- data/lib/llm/{event_handler.rb → eventhandler.rb} +0 -0
- data/lib/llm/eventstream/event.rb +0 -0
- data/lib/llm/eventstream/parser.rb +0 -0
- data/lib/llm/eventstream.rb +0 -0
- data/lib/llm/file.rb +4 -3
- data/lib/llm/function.rb +4 -4
- data/lib/llm/message.rb +0 -0
- data/lib/llm/mime.rb +88 -6
- data/lib/llm/multipart.rb +0 -1
- data/lib/llm/object/builder.rb +0 -0
- data/lib/llm/object/kernel.rb +0 -0
- data/lib/llm/object.rb +2 -3
- data/lib/llm/provider.rb +3 -3
- data/lib/llm/providers/anthropic/error_handler.rb +2 -0
- data/lib/llm/providers/anthropic/format/completion_format.rb +0 -0
- data/lib/llm/providers/anthropic/format.rb +0 -0
- data/lib/llm/providers/anthropic/models.rb +2 -2
- data/lib/llm/providers/anthropic/response/completion.rb +0 -0
- data/lib/llm/providers/anthropic/stream_parser.rb +0 -0
- data/lib/llm/providers/anthropic.rb +10 -1
- data/lib/llm/providers/deepseek/format/completion_format.rb +0 -0
- data/lib/llm/providers/deepseek/format.rb +0 -0
- data/lib/llm/providers/deepseek.rb +10 -1
- data/lib/llm/providers/gemini/audio.rb +3 -3
- data/lib/llm/providers/gemini/error_handler.rb +2 -0
- data/lib/llm/providers/gemini/files.rb +8 -20
- data/lib/llm/providers/gemini/format/completion_format.rb +2 -2
- data/lib/llm/providers/gemini/format.rb +0 -0
- data/lib/llm/providers/gemini/images.rb +4 -4
- data/lib/llm/providers/gemini/models.rb +2 -2
- data/lib/llm/providers/gemini/response/completion.rb +2 -0
- data/lib/llm/providers/gemini/response/embedding.rb +1 -1
- data/lib/llm/providers/gemini/response/file.rb +0 -0
- data/lib/llm/providers/gemini/response/image.rb +0 -0
- data/lib/llm/providers/gemini/stream_parser.rb +0 -0
- data/lib/llm/providers/gemini.rb +13 -21
- data/lib/llm/providers/llamacpp.rb +12 -1
- data/lib/llm/providers/ollama/error_handler.rb +2 -0
- data/lib/llm/providers/ollama/format/completion_format.rb +0 -0
- data/lib/llm/providers/ollama/format.rb +0 -0
- data/lib/llm/providers/ollama/models.rb +0 -0
- data/lib/llm/providers/ollama/response/completion.rb +0 -0
- data/lib/llm/providers/ollama/response/embedding.rb +1 -2
- data/lib/llm/providers/ollama/stream_parser.rb +0 -0
- data/lib/llm/providers/ollama.rb +8 -11
- data/lib/llm/providers/openai/audio.rb +4 -4
- data/lib/llm/providers/openai/error_handler.rb +13 -1
- data/lib/llm/providers/openai/files.rb +8 -19
- data/lib/llm/providers/openai/format/completion_format.rb +0 -0
- data/lib/llm/providers/openai/format/moderation_format.rb +0 -0
- data/lib/llm/providers/openai/format/respond_format.rb +0 -0
- data/lib/llm/providers/openai/format.rb +0 -0
- data/lib/llm/providers/openai/images.rb +10 -10
- data/lib/llm/providers/openai/models.rb +2 -2
- data/lib/llm/providers/openai/moderations.rb +0 -0
- data/lib/llm/providers/openai/response/audio.rb +0 -0
- data/lib/llm/providers/openai/response/completion.rb +2 -2
- data/lib/llm/providers/openai/response/embedding.rb +3 -3
- data/lib/llm/providers/openai/response/file.rb +0 -0
- data/lib/llm/providers/openai/response/image.rb +0 -0
- data/lib/llm/providers/openai/response/moderations.rb +0 -0
- data/lib/llm/providers/openai/response/responds.rb +0 -1
- data/lib/llm/providers/openai/responses.rb +6 -25
- data/lib/llm/providers/openai/stream_parser.rb +1 -0
- data/lib/llm/providers/openai/vector_stores.rb +85 -3
- data/lib/llm/providers/openai.rb +10 -1
- data/lib/llm/providers/xai/images.rb +58 -0
- data/lib/llm/providers/xai.rb +72 -0
- data/lib/llm/response.rb +5 -0
- data/lib/llm/{json/schema → schema}/array.rb +3 -3
- data/lib/llm/{json/schema → schema}/boolean.rb +3 -3
- data/lib/llm/{json/schema → schema}/integer.rb +6 -6
- data/lib/llm/{json/schema → schema}/leaf.rb +9 -9
- data/lib/llm/{json/schema → schema}/null.rb +3 -3
- data/lib/llm/{json/schema → schema}/number.rb +6 -6
- data/lib/llm/{json/schema → schema}/object.rb +3 -3
- data/lib/llm/{json/schema → schema}/string.rb +5 -5
- data/lib/llm/{json/schema → schema}/version.rb +1 -1
- data/lib/llm/{json/schema.rb → schema.rb} +10 -13
- data/lib/llm/utils.rb +0 -0
- data/lib/llm/version.rb +1 -1
- data/lib/llm.rb +11 -2
- data/llm.gemspec +4 -4
- metadata +22 -20
@@ -2,36 +2,17 @@
|
|
2
2
|
|
3
3
|
class LLM::OpenAI
|
4
4
|
##
|
5
|
-
# The {LLM::OpenAI::Responses LLM::OpenAI::Responses} class provides
|
6
|
-
#
|
7
|
-
# The responses API is similar to the chat completions API but it can maintain
|
8
|
-
# conversation state across multiple requests. This is useful when you want to
|
9
|
-
# save bandwidth and/or not maintain the message thread by yourself.
|
5
|
+
# The {LLM::OpenAI::Responses LLM::OpenAI::Responses} class provides
|
6
|
+
# an interface for [OpenAI's response API](https://platform.openai.com/docs/guides/conversation-state?api-mode=responses).
|
10
7
|
#
|
11
8
|
# @example example #1
|
12
9
|
# #!/usr/bin/env ruby
|
13
10
|
# require "llm"
|
14
11
|
#
|
15
|
-
# llm = LLM.openai(ENV["KEY"])
|
16
|
-
# res1 = llm.responses.create "Your task is to
|
17
|
-
# res2 = llm.responses.create "5 + 5
|
18
|
-
# [res1,res2].each { llm.responses.delete(_1) }
|
19
|
-
#
|
20
|
-
# @example example #2
|
21
|
-
# #!/usr/bin/env ruby
|
22
|
-
# require "llm"
|
23
|
-
#
|
24
|
-
# llm = LLM.openai(ENV["KEY"])
|
25
|
-
# file = llm.files.create file: "/images/hat.png"
|
26
|
-
# res = llm.responses.create ["Describe the image", file]
|
27
|
-
#
|
28
|
-
# @example example #3
|
29
|
-
# #!/usr/bin/env ruby
|
30
|
-
# require "llm"
|
31
|
-
#
|
32
|
-
# llm = LLM.openai(ENV["KEY"])
|
33
|
-
# file = llm.files.create file: "/documents/freebsd.pdf"
|
34
|
-
# res = llm.responses.create ["Describe the document, file]
|
12
|
+
# llm = LLM.openai(key: ENV["KEY"])
|
13
|
+
# res1 = llm.responses.create "Your task is to answer the user's questions", role: :developer
|
14
|
+
# res2 = llm.responses.create "5 + 5 = X ?", role: :user, previous_response_id: res1.id
|
15
|
+
# [res1, res2].each { llm.responses.delete(_1) }
|
35
16
|
class Responses
|
36
17
|
require_relative "response/responds"
|
37
18
|
include Format
|
@@ -2,9 +2,8 @@
|
|
2
2
|
|
3
3
|
class LLM::OpenAI
|
4
4
|
##
|
5
|
-
# The {LLM::OpenAI::
|
6
|
-
# an interface
|
7
|
-
# @see https://platform.openai.com/docs/api-reference/vector_stores/create OpenAI docs
|
5
|
+
# The {LLM::OpenAI::VectorStores LLM::OpenAI::VectorStores} class provides
|
6
|
+
# an interface for [OpenAI's vector stores API](https://platform.openai.com/docs/api-reference/vector_stores/create)
|
8
7
|
class VectorStores
|
9
8
|
##
|
10
9
|
# @param [LLM::Provider] provider
|
@@ -97,6 +96,89 @@ class LLM::OpenAI
|
|
97
96
|
LLM::Response.new(res)
|
98
97
|
end
|
99
98
|
|
99
|
+
##
|
100
|
+
# List all files in a vector store
|
101
|
+
# @param [String, #id] vector The ID of the vector store
|
102
|
+
# @param [Hash] params Other parameters (see OpenAI docs)
|
103
|
+
# @raise (see LLM::Provider#request)
|
104
|
+
# @return [LLM::Response]
|
105
|
+
# @see https://platform.openai.com/docs/api-reference/vector_stores_files/listFiles OpenAI docs
|
106
|
+
def all_files(vector:, **params)
|
107
|
+
vector_id = vector.respond_to?(:id) ? vector.id : vector
|
108
|
+
query = URI.encode_www_form(params)
|
109
|
+
req = Net::HTTP::Get.new("/v1/vector_stores/#{vector_id}/files?#{query}", headers)
|
110
|
+
res = execute(request: req)
|
111
|
+
LLM::Response.new(res)
|
112
|
+
end
|
113
|
+
|
114
|
+
##
|
115
|
+
# Add a file to a vector store
|
116
|
+
# @param [String, #id] vector The ID of the vector store
|
117
|
+
# @param [String, #id] file The ID of the file to add
|
118
|
+
# @param [Hash] attributes Attributes to associate with the file (optional)
|
119
|
+
# @param [Hash] params Other parameters (see OpenAI docs)
|
120
|
+
# @raise (see LLM::Provider#request)
|
121
|
+
# @return [LLM::Response]
|
122
|
+
# @see https://platform.openai.com/docs/api-reference/vector_stores_files/createFile OpenAI docs
|
123
|
+
def add_file(vector:, file:, attributes: nil, **params)
|
124
|
+
vector_id = vector.respond_to?(:id) ? vector.id : vector
|
125
|
+
file_id = file.respond_to?(:id) ? file.id : file
|
126
|
+
req = Net::HTTP::Post.new("/v1/vector_stores/#{vector_id}/files", headers)
|
127
|
+
req.body = JSON.dump(params.merge({file_id:, attributes:}).compact)
|
128
|
+
res = execute(request: req)
|
129
|
+
LLM::Response.new(res)
|
130
|
+
end
|
131
|
+
alias_method :create_file, :add_file
|
132
|
+
|
133
|
+
##
|
134
|
+
# Update a file in a vector store
|
135
|
+
# @param [String, #id] vector The ID of the vector store
|
136
|
+
# @param [String, #id] file The ID of the file to update
|
137
|
+
# @param [Hash] attributes Attributes to associate with the file
|
138
|
+
# @param [Hash] params Other parameters (see OpenAI docs)
|
139
|
+
# @raise (see LLM::Provider#request)
|
140
|
+
# @return [LLM::Response]
|
141
|
+
# @see https://platform.openai.com/docs/api-reference/vector_stores_files/updateAttributes OpenAI docs
|
142
|
+
def update_file(vector:, file:, attributes:, **params)
|
143
|
+
vector_id = vector.respond_to?(:id) ? vector.id : vector
|
144
|
+
file_id = file.respond_to?(:id) ? file.id : file
|
145
|
+
req = Net::HTTP::Post.new("/v1/vector_stores/#{vector_id}/files/#{file_id}", headers)
|
146
|
+
req.body = JSON.dump(params.merge({attributes:}).compact)
|
147
|
+
res = execute(request: req)
|
148
|
+
LLM::Response.new(res)
|
149
|
+
end
|
150
|
+
|
151
|
+
##
|
152
|
+
# Get a file from a vector store
|
153
|
+
# @param [String, #id] vector The ID of the vector store
|
154
|
+
# @param [String, #id] file The ID of the file to retrieve
|
155
|
+
# @raise (see LLM::Provider#request)
|
156
|
+
# @return [LLM::Response]
|
157
|
+
# @see https://platform.openai.com/docs/api-reference/vector_stores_files/getFile OpenAI docs
|
158
|
+
def get_file(vector:, file:, **params)
|
159
|
+
vector_id = vector.respond_to?(:id) ? vector.id : vector
|
160
|
+
file_id = file.respond_to?(:id) ? file.id : file
|
161
|
+
query = URI.encode_www_form(params)
|
162
|
+
req = Net::HTTP::Get.new("/v1/vector_stores/#{vector_id}/files/#{file_id}?#{query}", headers)
|
163
|
+
res = execute(request: req)
|
164
|
+
LLM::Response.new(res)
|
165
|
+
end
|
166
|
+
|
167
|
+
##
|
168
|
+
# Delete a file from a vector store
|
169
|
+
# @param [String, #id] vector The ID of the vector store
|
170
|
+
# @param [String, #id] file The ID of the file to delete
|
171
|
+
# @raise (see LLM::Provider#request)
|
172
|
+
# @return [LLM::Response]
|
173
|
+
# @see https://platform.openai.com/docs/api-reference/vector_stores_files/deleteFile OpenAI docs
|
174
|
+
def delete_file(vector:, file:)
|
175
|
+
vector_id = vector.respond_to?(:id) ? vector.id : vector
|
176
|
+
file_id = file.respond_to?(:id) ? file.id : file
|
177
|
+
req = Net::HTTP::Delete.new("/v1/vector_stores/#{vector_id}/files/#{file_id}", headers)
|
178
|
+
res = execute(request: req)
|
179
|
+
LLM::Response.new(res)
|
180
|
+
end
|
181
|
+
|
100
182
|
private
|
101
183
|
|
102
184
|
[:headers, :execute, :set_body_stream].each do |m|
|
data/lib/llm/providers/openai.rb
CHANGED
@@ -3,7 +3,16 @@
|
|
3
3
|
module LLM
|
4
4
|
##
|
5
5
|
# The OpenAI class implements a provider for
|
6
|
-
# [OpenAI](https://platform.openai.com/)
|
6
|
+
# [OpenAI](https://platform.openai.com/).
|
7
|
+
#
|
8
|
+
# @example
|
9
|
+
# #!/usr/bin/env ruby
|
10
|
+
# require "llm"
|
11
|
+
#
|
12
|
+
# llm = LLM.openai(key: ENV["KEY"])
|
13
|
+
# bot = LLM::Bot.new(llm)
|
14
|
+
# bot.chat ["Tell me about this photo", File.open("/images/capybara.jpg", "rb")]
|
15
|
+
# bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
|
7
16
|
class OpenAI < Provider
|
8
17
|
require_relative "openai/response/embedding"
|
9
18
|
require_relative "openai/response/completion"
|
@@ -0,0 +1,58 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
class LLM::XAI
|
4
|
+
##
|
5
|
+
# The {LLM::XAI::Images LLM::XAI::Images} class provides an interface
|
6
|
+
# for [xAI's images API](https://docs.x.ai/docs/guides/image-generations).
|
7
|
+
# xAI supports multiple response formats: temporary URLs, or binary strings
|
8
|
+
# encoded in base64. The default is to return temporary URLs.
|
9
|
+
#
|
10
|
+
# @example Temporary URLs
|
11
|
+
# #!/usr/bin/env ruby
|
12
|
+
# require "llm"
|
13
|
+
# require "open-uri"
|
14
|
+
# require "fileutils"
|
15
|
+
#
|
16
|
+
# llm = LLM.xai(key: ENV["KEY"])
|
17
|
+
# res = llm.images.create prompt: "A dog on a rocket to the moon"
|
18
|
+
# FileUtils.mv OpenURI.open_uri(res.urls[0]).path,
|
19
|
+
# "rocket.png"
|
20
|
+
#
|
21
|
+
# @example Binary strings
|
22
|
+
# #!/usr/bin/env ruby
|
23
|
+
# require "llm"
|
24
|
+
#
|
25
|
+
# llm = LLM.xai(key: ENV["KEY"])
|
26
|
+
# res = llm.images.create prompt: "A dog on a rocket to the moon",
|
27
|
+
# response_format: "b64_json"
|
28
|
+
# IO.copy_stream res.images[0], "rocket.png"
|
29
|
+
class Images < LLM::OpenAI::Images
|
30
|
+
##
|
31
|
+
# Create an image
|
32
|
+
# @example
|
33
|
+
# llm = LLM.xai(key: ENV["KEY"])
|
34
|
+
# res = llm.images.create prompt: "A dog on a rocket to the moon"
|
35
|
+
# res.urls.each { print _1, "\n"}
|
36
|
+
# @see https://docs.x.ai/docs/guides/image-generations xAI docs
|
37
|
+
# @param [String] prompt The prompt
|
38
|
+
# @param [String] model The model to use
|
39
|
+
# @param [Hash] params Other parameters (see xAI docs)
|
40
|
+
# @raise (see LLM::Provider#request)
|
41
|
+
# @return [LLM::Response]
|
42
|
+
def create(prompt:, model: "grok-2-image-1212", **params)
|
43
|
+
super
|
44
|
+
end
|
45
|
+
|
46
|
+
##
|
47
|
+
# @raise [NotImplementedError]
|
48
|
+
def edit(model: "grok-2-image-1212", **)
|
49
|
+
raise NotImplementedError
|
50
|
+
end
|
51
|
+
|
52
|
+
##
|
53
|
+
# @raise [NotImplementedError]
|
54
|
+
def create_variation(model: "grok-2-image-1212", **)
|
55
|
+
raise NotImplementedError
|
56
|
+
end
|
57
|
+
end
|
58
|
+
end
|
@@ -0,0 +1,72 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require_relative "openai" unless defined?(LLM::OpenAI)
|
4
|
+
|
5
|
+
module LLM
|
6
|
+
##
|
7
|
+
# The XAI class implements a provider for [xAI](https://docs.x.ai).
|
8
|
+
#
|
9
|
+
# @example
|
10
|
+
# #!/usr/bin/env ruby
|
11
|
+
# require "llm"
|
12
|
+
#
|
13
|
+
# llm = LLM.xai(key: ENV["KEY"])
|
14
|
+
# bot = LLM::Bot.new(llm)
|
15
|
+
# bot.chat ["Tell me about this photo", File.open("/images/crow.jpg", "rb")]
|
16
|
+
# bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
|
17
|
+
class XAI < OpenAI
|
18
|
+
require_relative "xai/images"
|
19
|
+
|
20
|
+
##
|
21
|
+
# @param [String] host A regional host or the default ("api.x.ai")
|
22
|
+
# @param key (see LLM::Provider#initialize)
|
23
|
+
# @see https://docs.x.ai/docs/key-information/regions Regional endpoints
|
24
|
+
def initialize(host: "api.x.ai", **)
|
25
|
+
super
|
26
|
+
end
|
27
|
+
|
28
|
+
##
|
29
|
+
# @raise [NotImplementedError]
|
30
|
+
def files
|
31
|
+
raise NotImplementedError
|
32
|
+
end
|
33
|
+
|
34
|
+
##
|
35
|
+
# @return [LLM::XAI::Images]
|
36
|
+
def images
|
37
|
+
LLM::XAI::Images.new(self)
|
38
|
+
end
|
39
|
+
|
40
|
+
##
|
41
|
+
# @raise [NotImplementedError]
|
42
|
+
def audio
|
43
|
+
raise NotImplementedError
|
44
|
+
end
|
45
|
+
|
46
|
+
##
|
47
|
+
# @raise [NotImplementedError]
|
48
|
+
def moderations
|
49
|
+
raise NotImplementedError
|
50
|
+
end
|
51
|
+
|
52
|
+
##
|
53
|
+
# @raise [NotImplementedError]
|
54
|
+
def responses
|
55
|
+
raise NotImplementedError
|
56
|
+
end
|
57
|
+
|
58
|
+
##
|
59
|
+
# @raise [NotImplementedError]
|
60
|
+
def vector_stores
|
61
|
+
raise NotImplementedError
|
62
|
+
end
|
63
|
+
|
64
|
+
##
|
65
|
+
# Returns the default model for chat completions
|
66
|
+
# @see https://docs.x.ai/docs/models grok-4-0709
|
67
|
+
# @return [String]
|
68
|
+
def default_model
|
69
|
+
"grok-4-0709"
|
70
|
+
end
|
71
|
+
end
|
72
|
+
end
|
data/lib/llm/response.rb
CHANGED
@@ -1,6 +1,11 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
module LLM
|
4
|
+
##
|
5
|
+
# {LLM::Response LLM::Response} encapsulates a response
|
6
|
+
# from an LLM provider. It is returned by all methods
|
7
|
+
# that make requests to a provider, and sometimes extended
|
8
|
+
# with provider-specific functionality.
|
4
9
|
class Response
|
5
10
|
require "json"
|
6
11
|
|
@@ -1,10 +1,10 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Array LLM::Schema::Array} class represents an
|
6
6
|
# array value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
|
8
8
|
# can act as constraints.
|
9
9
|
class Array < Leaf
|
10
10
|
def initialize(items)
|
@@ -1,10 +1,10 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Boolean LLM::Schema::Boolean} class represents a
|
6
6
|
# boolean value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf}.
|
8
8
|
class Boolean < Leaf
|
9
9
|
def to_h
|
10
10
|
super.merge!({type: "boolean"})
|
@@ -1,16 +1,16 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Integer LLM::Schema::Integer} class represents a
|
6
6
|
# whole number value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
|
8
8
|
# can act as constraints.
|
9
9
|
class Integer < Leaf
|
10
10
|
##
|
11
11
|
# Constrain the number to a minimum value
|
12
12
|
# @param [Integer] i The minimum value
|
13
|
-
# @return [
|
13
|
+
# @return [LLM::Schema::Integer] Returns self
|
14
14
|
def min(i)
|
15
15
|
tap { @minimum = i }
|
16
16
|
end
|
@@ -18,7 +18,7 @@ class JSON::Schema
|
|
18
18
|
##
|
19
19
|
# Constrain the number to a maximum value
|
20
20
|
# @param [Integer] i The maximum value
|
21
|
-
# @return [
|
21
|
+
# @return [LLM::Schema::Integer] Returns self
|
22
22
|
def max(i)
|
23
23
|
tap { @maximum = i }
|
24
24
|
end
|
@@ -26,7 +26,7 @@ class JSON::Schema
|
|
26
26
|
##
|
27
27
|
# Constrain the number to a multiple of a given value
|
28
28
|
# @param [Integer] i The multiple
|
29
|
-
# @return [
|
29
|
+
# @return [LLM::Schema::Integer] Returns self
|
30
30
|
def multiple_of(i)
|
31
31
|
tap { @multiple_of = i }
|
32
32
|
end
|
@@ -1,11 +1,11 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Leaf LLM::Schema::Leaf} class is the
|
6
6
|
# superclass of all values that can appear in a JSON schema.
|
7
|
-
# See the instance methods of {
|
8
|
-
# an example of how to create instances of {
|
7
|
+
# See the instance methods of {LLM::Schema LLM::Schema} for
|
8
|
+
# an example of how to create instances of {LLM::Schema::Leaf LLM::Schema::Leaf}
|
9
9
|
# through its subclasses.
|
10
10
|
class Leaf
|
11
11
|
def initialize
|
@@ -19,7 +19,7 @@ class JSON::Schema
|
|
19
19
|
##
|
20
20
|
# Set the description of a leaf
|
21
21
|
# @param [String] str The description
|
22
|
-
# @return [
|
22
|
+
# @return [LLM::Schema::Leaf]
|
23
23
|
def description(str)
|
24
24
|
tap { @description = str }
|
25
25
|
end
|
@@ -27,7 +27,7 @@ class JSON::Schema
|
|
27
27
|
##
|
28
28
|
# Set the default value of a leaf
|
29
29
|
# @param [Object] value The default value
|
30
|
-
# @return [
|
30
|
+
# @return [LLM::Schema::Leaf]
|
31
31
|
def default(value)
|
32
32
|
tap { @default = value }
|
33
33
|
end
|
@@ -36,7 +36,7 @@ class JSON::Schema
|
|
36
36
|
# Set the allowed values of a leaf
|
37
37
|
# @see https://tour.json-schema.org/content/02-Primitive-Types/07-Enumerated-Values-II Enumerated Values
|
38
38
|
# @param [Array] values The allowed values
|
39
|
-
# @return [
|
39
|
+
# @return [LLM::Schema::Leaf]
|
40
40
|
def enum(*values)
|
41
41
|
tap { @enum = values }
|
42
42
|
end
|
@@ -45,14 +45,14 @@ class JSON::Schema
|
|
45
45
|
# Set the value of a leaf to be a constant value
|
46
46
|
# @see https://tour.json-schema.org/content/02-Primitive-Types/08-Defining-Constant-Values Constant Values
|
47
47
|
# @param [Object] value The constant value
|
48
|
-
# @return [
|
48
|
+
# @return [LLM::Schema::Leaf]
|
49
49
|
def const(value)
|
50
50
|
tap { @const = value }
|
51
51
|
end
|
52
52
|
|
53
53
|
##
|
54
54
|
# Denote a leaf as required
|
55
|
-
# @return [
|
55
|
+
# @return [LLM::Schema::Leaf]
|
56
56
|
def required
|
57
57
|
tap { @required = true }
|
58
58
|
end
|
@@ -1,10 +1,10 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Null LLM::Schema::Null} class represents a
|
6
6
|
# null value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf}.
|
8
8
|
class Null < Leaf
|
9
9
|
def to_h
|
10
10
|
super.merge!({type: "null"})
|
@@ -1,16 +1,16 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Number LLM::Schema::Number} class represents
|
6
6
|
# a number (either whole or decimal) value in a JSON schema. It is a
|
7
|
-
# subclass of {
|
7
|
+
# subclass of {LLM::Schema::Leaf LLM::Schema::Leaf} and provides
|
8
8
|
# methods that can act as constraints.
|
9
9
|
class Number < Leaf
|
10
10
|
##
|
11
11
|
# Constrain the number to a minimum value
|
12
12
|
# @param [Integer, Float] i The minimum value
|
13
|
-
# @return [
|
13
|
+
# @return [LLM::Schema::Number] Returns self
|
14
14
|
def min(i)
|
15
15
|
tap { @minimum = i }
|
16
16
|
end
|
@@ -18,7 +18,7 @@ class JSON::Schema
|
|
18
18
|
##
|
19
19
|
# Constrain the number to a maximum value
|
20
20
|
# @param [Integer, Float] i The maximum value
|
21
|
-
# @return [
|
21
|
+
# @return [LLM::Schema::Number] Returns self
|
22
22
|
def max(i)
|
23
23
|
tap { @maximum = i }
|
24
24
|
end
|
@@ -26,7 +26,7 @@ class JSON::Schema
|
|
26
26
|
##
|
27
27
|
# Constrain the number to a multiple of a given value
|
28
28
|
# @param [Integer, Float] i The multiple
|
29
|
-
# @return [
|
29
|
+
# @return [LLM::Schema::Number] Returns self
|
30
30
|
def multiple_of(i)
|
31
31
|
tap { @multiple_of = i }
|
32
32
|
end
|
@@ -1,10 +1,10 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::Object LLM::Schema::Object} class represents an
|
6
6
|
# object value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
|
8
8
|
# can act as constraints.
|
9
9
|
class Object < Leaf
|
10
10
|
def initialize(properties)
|
@@ -1,16 +1,16 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
class
|
3
|
+
class LLM::Schema
|
4
4
|
##
|
5
|
-
# The {
|
5
|
+
# The {LLM::Schema::String LLM::Schema::String} class represents a
|
6
6
|
# string value in a JSON schema. It is a subclass of
|
7
|
-
# {
|
7
|
+
# {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
|
8
8
|
# can act as constraints.
|
9
9
|
class String < Leaf
|
10
10
|
##
|
11
11
|
# Constrain the string to a minimum length
|
12
12
|
# @param [Integer] i The minimum length
|
13
|
-
# @return [
|
13
|
+
# @return [LLM::Schema::String] Returns self
|
14
14
|
def min(i)
|
15
15
|
tap { @minimum = i }
|
16
16
|
end
|
@@ -18,7 +18,7 @@ class JSON::Schema
|
|
18
18
|
##
|
19
19
|
# Constrain the string to a maximum length
|
20
20
|
# @param [Integer] i The maximum length
|
21
|
-
# @return [
|
21
|
+
# @return [LLM::Schema::String] Returns self
|
22
22
|
def max(i)
|
23
23
|
tap { @maximum = i }
|
24
24
|
end
|
@@ -1,10 +1,7 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
module JSON
|
4
|
-
end unless defined?(JSON)
|
5
|
-
|
6
3
|
##
|
7
|
-
# The {
|
4
|
+
# The {LLM::Schema LLM::Schema} class represents a JSON schema,
|
8
5
|
# and provides methods that let you describe and produce a schema
|
9
6
|
# that can be used in various contexts that include the validation
|
10
7
|
# and generation of JSON data.
|
@@ -13,14 +10,14 @@ end unless defined?(JSON)
|
|
13
10
|
# @see https://tour.json-schema.org/ JSON Schema Tour
|
14
11
|
#
|
15
12
|
# @example
|
16
|
-
# schema =
|
13
|
+
# schema = LLM::Schema.new
|
17
14
|
# schema.object({
|
18
15
|
# name: schema.string.enum("John", "Jane").required,
|
19
16
|
# age: schema.integer.required,
|
20
17
|
# hobbies: schema.array(schema.string, schema.null).required,
|
21
18
|
# address: schema.object({street: schema.string}).required,
|
22
19
|
# })
|
23
|
-
class
|
20
|
+
class LLM::Schema
|
24
21
|
require_relative "schema/version"
|
25
22
|
require_relative "schema/leaf"
|
26
23
|
require_relative "schema/object"
|
@@ -34,7 +31,7 @@ class JSON::Schema
|
|
34
31
|
##
|
35
32
|
# Returns an object
|
36
33
|
# @param [Hash] properties A hash of properties
|
37
|
-
# @return [
|
34
|
+
# @return [LLM::Schema::Object]
|
38
35
|
def object(properties)
|
39
36
|
Object.new(properties)
|
40
37
|
end
|
@@ -42,42 +39,42 @@ class JSON::Schema
|
|
42
39
|
##
|
43
40
|
# Returns an array
|
44
41
|
# @param [Array] items An array of items
|
45
|
-
# @return [
|
42
|
+
# @return [LLM::Schema::Array]
|
46
43
|
def array(*items)
|
47
44
|
Array.new(*items)
|
48
45
|
end
|
49
46
|
|
50
47
|
##
|
51
48
|
# Returns a string
|
52
|
-
# @return [
|
49
|
+
# @return [LLM::Schema::String]
|
53
50
|
def string
|
54
51
|
String.new
|
55
52
|
end
|
56
53
|
|
57
54
|
##
|
58
55
|
# Returns a number
|
59
|
-
# @return [
|
56
|
+
# @return [LLM::Schema::Number] a number
|
60
57
|
def number
|
61
58
|
Number.new
|
62
59
|
end
|
63
60
|
|
64
61
|
##
|
65
62
|
# Returns an integer
|
66
|
-
# @return [
|
63
|
+
# @return [LLM::Schema::Integer]
|
67
64
|
def integer
|
68
65
|
Integer.new
|
69
66
|
end
|
70
67
|
|
71
68
|
##
|
72
69
|
# Returns a boolean
|
73
|
-
# @return [
|
70
|
+
# @return [LLM::Schema::Boolean]
|
74
71
|
def boolean
|
75
72
|
Boolean.new
|
76
73
|
end
|
77
74
|
|
78
75
|
##
|
79
76
|
# Returns null
|
80
|
-
# @return [
|
77
|
+
# @return [LLM::Schema::Null]
|
81
78
|
def null
|
82
79
|
Null.new
|
83
80
|
end
|
data/lib/llm/utils.rb
CHANGED
File without changes
|
data/lib/llm/version.rb
CHANGED