llm.rb 0.12.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/README.md +27 -19
  3. data/lib/llm/bot/conversable.rb +12 -4
  4. data/lib/llm/bot/prompt/completion.rb +18 -0
  5. data/lib/llm/bot/prompt/respond.rb +9 -0
  6. data/lib/llm/bot.rb +3 -3
  7. data/lib/llm/buffer.rb +31 -7
  8. data/lib/llm/error.rb +4 -0
  9. data/lib/llm/file.rb +1 -1
  10. data/lib/llm/function.rb +2 -2
  11. data/lib/llm/mime.rb +92 -6
  12. data/lib/llm/provider.rb +4 -3
  13. data/lib/llm/providers/anthropic/error_handler.rb +2 -0
  14. data/lib/llm/providers/anthropic/files.rb +155 -0
  15. data/lib/llm/providers/anthropic/format/completion_format.rb +12 -2
  16. data/lib/llm/providers/anthropic/models.rb +2 -1
  17. data/lib/llm/providers/anthropic/response/enumerable.rb +11 -0
  18. data/lib/llm/providers/anthropic/response/file.rb +23 -0
  19. data/lib/llm/providers/anthropic.rb +11 -1
  20. data/lib/llm/providers/gemini/error_handler.rb +2 -0
  21. data/lib/llm/providers/gemini/files.rb +2 -1
  22. data/lib/llm/providers/gemini/models.rb +2 -1
  23. data/lib/llm/providers/gemini/response/completion.rb +2 -0
  24. data/lib/llm/providers/gemini/response/files.rb +15 -0
  25. data/lib/llm/providers/gemini/response/models.rb +15 -0
  26. data/lib/llm/providers/ollama/error_handler.rb +2 -0
  27. data/lib/llm/providers/openai/error_handler.rb +13 -1
  28. data/lib/llm/providers/openai/files.rb +2 -1
  29. data/lib/llm/providers/openai/models.rb +3 -1
  30. data/lib/llm/providers/openai/response/enumerable.rb +11 -0
  31. data/lib/llm/providers/openai/vector_stores.rb +5 -3
  32. data/lib/llm/providers/xai/images.rb +1 -1
  33. data/lib/llm/{json/schema → schema}/array.rb +3 -3
  34. data/lib/llm/{json/schema → schema}/boolean.rb +3 -3
  35. data/lib/llm/{json/schema → schema}/integer.rb +6 -6
  36. data/lib/llm/{json/schema → schema}/leaf.rb +9 -9
  37. data/lib/llm/{json/schema → schema}/null.rb +3 -3
  38. data/lib/llm/{json/schema → schema}/number.rb +6 -6
  39. data/lib/llm/{json/schema → schema}/object.rb +3 -3
  40. data/lib/llm/{json/schema → schema}/string.rb +5 -5
  41. data/lib/llm/{json/schema → schema}/version.rb +1 -1
  42. data/lib/llm/{json/schema.rb → schema.rb} +10 -13
  43. data/lib/llm/version.rb +1 -1
  44. data/lib/llm.rb +1 -1
  45. data/llm.gemspec +1 -1
  46. metadata +19 -13
@@ -60,6 +60,12 @@ module LLM::Anthropic::Format
60
60
  "is not an image or PDF, and therefore not supported by the " \
61
61
  "Anthropic API"
62
62
  end
63
+ when LLM::Response
64
+ if content.file?
65
+ [{type: content.file_type, source: {type: :file, file_id: content.id}}]
66
+ else
67
+ prompt_error!(content)
68
+ end
63
69
  when String
64
70
  [{type: :text, text: content}]
65
71
  when LLM::Message
@@ -67,11 +73,15 @@ module LLM::Anthropic::Format
67
73
  when LLM::Function::Return
68
74
  [{type: "tool_result", tool_use_id: content.id, content: [{type: :text, text: JSON.dump(content.value)}]}]
69
75
  else
70
- raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
71
- "is not supported by the Anthropic API"
76
+ prompt_error!(content)
72
77
  end
73
78
  end
74
79
 
80
+ def prompt_error!(content)
81
+ raise LLM::PromptError, "The given object (an instance of #{content.class}) " \
82
+ "is not supported by the Anthropic API"
83
+ end
84
+
75
85
  def message = @message
76
86
  def content = message.content
77
87
  end
@@ -1,6 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class LLM::Anthropic
4
+ require_relative "response/enumerable"
4
5
  ##
5
6
  # The {LLM::Anthropic::Models LLM::Anthropic::Models} class provides a model
6
7
  # object for interacting with [Anthropic's models API](https://platform.anthropic.com/docs/api-reference/models/list).
@@ -41,7 +42,7 @@ class LLM::Anthropic
41
42
  query = URI.encode_www_form(params)
42
43
  req = Net::HTTP::Get.new("/v1/models?#{query}", headers)
43
44
  res = execute(request: req)
44
- LLM::Response.new(res)
45
+ LLM::Response.new(res).extend(LLM::Anthropic::Response::Enumerable)
45
46
  end
46
47
 
47
48
  private
@@ -0,0 +1,11 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::Anthropic::Response
4
+ module Enumerable
5
+ include ::Enumerable
6
+ def each(&)
7
+ return enum_for(:each) unless block_given?
8
+ data.each { yield(_1) }
9
+ end
10
+ end
11
+ end
@@ -0,0 +1,23 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::Anthropic::Response
4
+ module File
5
+ ##
6
+ # Always return true
7
+ # @return [Boolean]
8
+ def file? = true
9
+
10
+ ##
11
+ # Returns the file type referenced by a prompt
12
+ # @return [Symbol]
13
+ def file_type
14
+ if mime_type.start_with?("image/")
15
+ :image
16
+ elsif mime_type == "text/plain" || mime_type == "application/pdf"
17
+ :document
18
+ else
19
+ :container_upload
20
+ end
21
+ end
22
+ end
23
+ end
@@ -18,6 +18,7 @@ module LLM
18
18
  require_relative "anthropic/format"
19
19
  require_relative "anthropic/error_handler"
20
20
  require_relative "anthropic/stream_parser"
21
+ require_relative "anthropic/files"
21
22
  require_relative "anthropic/models"
22
23
  include Format
23
24
 
@@ -60,6 +61,14 @@ module LLM
60
61
  LLM::Anthropic::Models.new(self)
61
62
  end
62
63
 
64
+ ##
65
+ # Provides an interface to Anthropic's files API
66
+ # @see https://docs.anthropic.com/en/docs/build-with-claude/files Anthropic docs
67
+ # @return [LLM::Anthropic::Files]
68
+ def files
69
+ LLM::Anthropic::Files.new(self)
70
+ end
71
+
63
72
  ##
64
73
  # @return (see LLM::Provider#assistant_role)
65
74
  def assistant_role
@@ -80,7 +89,8 @@ module LLM
80
89
  (@headers || {}).merge(
81
90
  "Content-Type" => "application/json",
82
91
  "x-api-key" => @key,
83
- "anthropic-version" => "2023-06-01"
92
+ "anthropic-version" => "2023-06-01",
93
+ "anthropic-beta" => "files-api-2025-04-14"
84
94
  )
85
95
  end
86
96
 
@@ -22,6 +22,8 @@ class LLM::Gemini
22
22
  # Raises a subclass of {LLM::Error LLM::Error}
23
23
  def raise_error!
24
24
  case res
25
+ when Net::HTTPServerError
26
+ raise LLM::ServerError.new { _1.response = res }, "Server error"
25
27
  when Net::HTTPBadRequest
26
28
  reason = body.dig("error", "details", 0, "reason")
27
29
  if reason == "API_KEY_INVALID"
@@ -24,6 +24,7 @@ class LLM::Gemini
24
24
  # bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
25
25
  class Files
26
26
  require_relative "response/file"
27
+ require_relative "response/files"
27
28
 
28
29
  ##
29
30
  # Returns a new Files object
@@ -49,7 +50,7 @@ class LLM::Gemini
49
50
  query = URI.encode_www_form(params.merge!(key: key))
50
51
  req = Net::HTTP::Get.new("/v1beta/files?#{query}", headers)
51
52
  res = execute(request: req)
52
- LLM::Response.new(res)
53
+ LLM::Response.new(res).extend(LLM::Gemini::Response::Files)
53
54
  end
54
55
 
55
56
  ##
@@ -17,6 +17,7 @@ class LLM::Gemini
17
17
  # print "id: ", model.id, "\n"
18
18
  # end
19
19
  class Models
20
+ require_relative "response/models"
20
21
  include LLM::Utils
21
22
 
22
23
  ##
@@ -43,7 +44,7 @@ class LLM::Gemini
43
44
  query = URI.encode_www_form(params.merge!(key: key))
44
45
  req = Net::HTTP::Get.new("/v1beta/models?#{query}", headers)
45
46
  res = execute(request: req)
46
- LLM::Response.new(res)
47
+ LLM::Response.new(res).extend(LLM::Gemini::Response::Models)
47
48
  end
48
49
 
49
50
  private
@@ -28,5 +28,7 @@ module LLM::Gemini::Response
28
28
  LLM::Object.new(function)
29
29
  end
30
30
  end
31
+
32
+ def candidates = body.candidates || []
31
33
  end
32
34
  end
@@ -0,0 +1,15 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::Gemini::Response
4
+ module Files
5
+ include ::Enumerable
6
+ def each(&)
7
+ return enum_for(:each) unless block_given?
8
+ files.each { yield(_1) }
9
+ end
10
+
11
+ def files
12
+ body.files || []
13
+ end
14
+ end
15
+ end
@@ -0,0 +1,15 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::Gemini::Response
4
+ module Models
5
+ include ::Enumerable
6
+ def each(&)
7
+ return enum_for(:each) unless block_given?
8
+ models.each { yield(_1) }
9
+ end
10
+
11
+ def models
12
+ body.models || []
13
+ end
14
+ end
15
+ end
@@ -22,6 +22,8 @@ class LLM::Ollama
22
22
  # Raises a subclass of {LLM::Error LLM::Error}
23
23
  def raise_error!
24
24
  case res
25
+ when Net::HTTPServerError
26
+ raise LLM::ServerError.new { _1.response = res }, "Server error"
25
27
  when Net::HTTPUnauthorized
26
28
  raise LLM::UnauthorizedError.new { _1.response = res }, "Authentication error"
27
29
  when Net::HTTPTooManyRequests
@@ -22,13 +22,25 @@ class LLM::OpenAI
22
22
  # Raises a subclass of {LLM::Error LLM::Error}
23
23
  def raise_error!
24
24
  case res
25
+ when Net::HTTPServerError
26
+ raise LLM::ServerError.new { _1.response = res }, "Server error"
25
27
  when Net::HTTPUnauthorized
26
28
  raise LLM::UnauthorizedError.new { _1.response = res }, "Authentication error"
27
29
  when Net::HTTPTooManyRequests
28
30
  raise LLM::RateLimitError.new { _1.response = res }, "Too many requests"
29
31
  else
30
- raise LLM::ResponseError.new { _1.response = res }, "Unexpected response"
32
+ error = body["error"] || {}
33
+ case error["type"]
34
+ when "server_error" then raise LLM::ServerError.new { _1.response = res }, error["message"]
35
+ else raise LLM::ResponseError.new { _1.response = res }, error["message"] || "Unexpected response"
36
+ end
31
37
  end
32
38
  end
39
+
40
+ private
41
+
42
+ def body
43
+ @body ||= JSON.parse(res.body)
44
+ end
33
45
  end
34
46
  end
@@ -18,6 +18,7 @@ class LLM::OpenAI
18
18
  # bot.chat ["Tell me about this PDF", file]
19
19
  # bot.messages.select(&:assistant?).each { print "[#{_1.role}]", _1.content, "\n" }
20
20
  class Files
21
+ require_relative "response/enumerable"
21
22
  require_relative "response/file"
22
23
 
23
24
  ##
@@ -44,7 +45,7 @@ class LLM::OpenAI
44
45
  query = URI.encode_www_form(params)
45
46
  req = Net::HTTP::Get.new("/v1/files?#{query}", headers)
46
47
  res = execute(request: req)
47
- LLM::Response.new(res)
48
+ LLM::Response.new(res).extend(LLM::OpenAI::Response::Enumerable)
48
49
  end
49
50
 
50
51
  ##
@@ -17,6 +17,8 @@ class LLM::OpenAI
17
17
  # print "id: ", model.id, "\n"
18
18
  # end
19
19
  class Models
20
+ require_relative "response/enumerable"
21
+
20
22
  ##
21
23
  # Returns a new Models object
22
24
  # @param provider [LLM::Provider]
@@ -41,7 +43,7 @@ class LLM::OpenAI
41
43
  query = URI.encode_www_form(params)
42
44
  req = Net::HTTP::Get.new("/v1/models?#{query}", headers)
43
45
  res = execute(request: req)
44
- LLM::Response.new(res)
46
+ LLM::Response.new(res).extend(LLM::OpenAI::Response::Enumerable)
45
47
  end
46
48
 
47
49
  private
@@ -0,0 +1,11 @@
1
+ # frozen_string_literal: true
2
+
3
+ module LLM::OpenAI::Response
4
+ module Enumerable
5
+ include ::Enumerable
6
+ def each(&)
7
+ return enum_for(:each) unless block_given?
8
+ data.each { yield(_1) }
9
+ end
10
+ end
11
+ end
@@ -5,6 +5,8 @@ class LLM::OpenAI
5
5
  # The {LLM::OpenAI::VectorStores LLM::OpenAI::VectorStores} class provides
6
6
  # an interface for [OpenAI's vector stores API](https://platform.openai.com/docs/api-reference/vector_stores/create)
7
7
  class VectorStores
8
+ require_relative "response/enumerable"
9
+
8
10
  ##
9
11
  # @param [LLM::Provider] provider
10
12
  # An OpenAI provider
@@ -20,7 +22,7 @@ class LLM::OpenAI
20
22
  query = URI.encode_www_form(params)
21
23
  req = Net::HTTP::Get.new("/v1/vector_stores?#{query}", headers)
22
24
  res = execute(request: req)
23
- LLM::Response.new(res)
25
+ LLM::Response.new(res).extend(LLM::OpenAI::Response::Enumerable)
24
26
  end
25
27
 
26
28
  ##
@@ -93,7 +95,7 @@ class LLM::OpenAI
93
95
  req = Net::HTTP::Post.new("/v1/vector_stores/#{vector_id}/search", headers)
94
96
  req.body = JSON.dump(params.merge({query:}).compact)
95
97
  res = execute(request: req)
96
- LLM::Response.new(res)
98
+ LLM::Response.new(res).extend(LLM::OpenAI::Response::Enumerable)
97
99
  end
98
100
 
99
101
  ##
@@ -108,7 +110,7 @@ class LLM::OpenAI
108
110
  query = URI.encode_www_form(params)
109
111
  req = Net::HTTP::Get.new("/v1/vector_stores/#{vector_id}/files?#{query}", headers)
110
112
  res = execute(request: req)
111
- LLM::Response.new(res)
113
+ LLM::Response.new(res).extend(LLM::OpenAI::Response::Enumerable)
112
114
  end
113
115
 
114
116
  ##
@@ -39,7 +39,7 @@ class LLM::XAI
39
39
  # @param [Hash] params Other parameters (see xAI docs)
40
40
  # @raise (see LLM::Provider#request)
41
41
  # @return [LLM::Response]
42
- def create(model: "grok-2-image-1212", **)
42
+ def create(prompt:, model: "grok-2-image-1212", **params)
43
43
  super
44
44
  end
45
45
 
@@ -1,10 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Array JSON::Schema::Array} class represents an
5
+ # The {LLM::Schema::Array LLM::Schema::Array} class represents an
6
6
  # array value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
8
8
  # can act as constraints.
9
9
  class Array < Leaf
10
10
  def initialize(items)
@@ -1,10 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Boolean JSON::Schema::Boolean} class represents a
5
+ # The {LLM::Schema::Boolean LLM::Schema::Boolean} class represents a
6
6
  # boolean value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf}.
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf}.
8
8
  class Boolean < Leaf
9
9
  def to_h
10
10
  super.merge!({type: "boolean"})
@@ -1,16 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Integer JSON::Schema::Integer} class represents a
5
+ # The {LLM::Schema::Integer LLM::Schema::Integer} class represents a
6
6
  # whole number value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
8
8
  # can act as constraints.
9
9
  class Integer < Leaf
10
10
  ##
11
11
  # Constrain the number to a minimum value
12
12
  # @param [Integer] i The minimum value
13
- # @return [JSON::Schema::Number] Returns self
13
+ # @return [LLM::Schema::Number] Returns self
14
14
  def min(i)
15
15
  tap { @minimum = i }
16
16
  end
@@ -18,7 +18,7 @@ class JSON::Schema
18
18
  ##
19
19
  # Constrain the number to a maximum value
20
20
  # @param [Integer] i The maximum value
21
- # @return [JSON::Schema::Number] Returns self
21
+ # @return [LLM::Schema::Number] Returns self
22
22
  def max(i)
23
23
  tap { @maximum = i }
24
24
  end
@@ -26,7 +26,7 @@ class JSON::Schema
26
26
  ##
27
27
  # Constrain the number to a multiple of a given value
28
28
  # @param [Integer] i The multiple
29
- # @return [JSON::Schema::Number] Returns self
29
+ # @return [LLM::Schema::Number] Returns self
30
30
  def multiple_of(i)
31
31
  tap { @multiple_of = i }
32
32
  end
@@ -1,11 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Leaf JSON::Schema::Leaf} class is the
5
+ # The {LLM::Schema::Leaf LLM::Schema::Leaf} class is the
6
6
  # superclass of all values that can appear in a JSON schema.
7
- # See the instance methods of {JSON::Schema JSON::Schema} for
8
- # an example of how to create instances of {JSON::Schema::Leaf JSON::Schema::Leaf}
7
+ # See the instance methods of {LLM::Schema LLM::Schema} for
8
+ # an example of how to create instances of {LLM::Schema::Leaf LLM::Schema::Leaf}
9
9
  # through its subclasses.
10
10
  class Leaf
11
11
  def initialize
@@ -19,7 +19,7 @@ class JSON::Schema
19
19
  ##
20
20
  # Set the description of a leaf
21
21
  # @param [String] str The description
22
- # @return [JSON::Schema::Leaf]
22
+ # @return [LLM::Schema::Leaf]
23
23
  def description(str)
24
24
  tap { @description = str }
25
25
  end
@@ -27,7 +27,7 @@ class JSON::Schema
27
27
  ##
28
28
  # Set the default value of a leaf
29
29
  # @param [Object] value The default value
30
- # @return [JSON::Schema::Leaf]
30
+ # @return [LLM::Schema::Leaf]
31
31
  def default(value)
32
32
  tap { @default = value }
33
33
  end
@@ -36,7 +36,7 @@ class JSON::Schema
36
36
  # Set the allowed values of a leaf
37
37
  # @see https://tour.json-schema.org/content/02-Primitive-Types/07-Enumerated-Values-II Enumerated Values
38
38
  # @param [Array] values The allowed values
39
- # @return [JSON::Schema::Leaf]
39
+ # @return [LLM::Schema::Leaf]
40
40
  def enum(*values)
41
41
  tap { @enum = values }
42
42
  end
@@ -45,14 +45,14 @@ class JSON::Schema
45
45
  # Set the value of a leaf to be a constant value
46
46
  # @see https://tour.json-schema.org/content/02-Primitive-Types/08-Defining-Constant-Values Constant Values
47
47
  # @param [Object] value The constant value
48
- # @return [JSON::Schema::Leaf]
48
+ # @return [LLM::Schema::Leaf]
49
49
  def const(value)
50
50
  tap { @const = value }
51
51
  end
52
52
 
53
53
  ##
54
54
  # Denote a leaf as required
55
- # @return [JSON::Schema::Leaf]
55
+ # @return [LLM::Schema::Leaf]
56
56
  def required
57
57
  tap { @required = true }
58
58
  end
@@ -1,10 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Null JSON::Schema::Null} class represents a
5
+ # The {LLM::Schema::Null LLM::Schema::Null} class represents a
6
6
  # null value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf}.
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf}.
8
8
  class Null < Leaf
9
9
  def to_h
10
10
  super.merge!({type: "null"})
@@ -1,16 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Number JSON::Schema::Number} class represents a
5
+ # The {LLM::Schema::Number LLM::Schema::Number} class represents a
6
6
  # a number (either whole or decimal) value in a JSON schema. It is a
7
- # subclass of {JSON::Schema::Leaf JSON::Schema::Leaf} and provides
7
+ # subclass of {LLM::Schema::Leaf LLM::Schema::Leaf} and provides
8
8
  # methods that can act as constraints.
9
9
  class Number < Leaf
10
10
  ##
11
11
  # Constrain the number to a minimum value
12
12
  # @param [Integer, Float] i The minimum value
13
- # @return [JSON::Schema::Number] Returns self
13
+ # @return [LLM::Schema::Number] Returns self
14
14
  def min(i)
15
15
  tap { @minimum = i }
16
16
  end
@@ -18,7 +18,7 @@ class JSON::Schema
18
18
  ##
19
19
  # Constrain the number to a maximum value
20
20
  # @param [Integer, Float] i The maximum value
21
- # @return [JSON::Schema::Number] Returns self
21
+ # @return [LLM::Schema::Number] Returns self
22
22
  def max(i)
23
23
  tap { @maximum = i }
24
24
  end
@@ -26,7 +26,7 @@ class JSON::Schema
26
26
  ##
27
27
  # Constrain the number to a multiple of a given value
28
28
  # @param [Integer, Float] i The multiple
29
- # @return [JSON::Schema::Number] Returns self
29
+ # @return [LLM::Schema::Number] Returns self
30
30
  def multiple_of(i)
31
31
  tap { @multiple_of = i }
32
32
  end
@@ -1,10 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::Object JSON::Schema::Object} class represents an
5
+ # The {LLM::Schema::Object LLM::Schema::Object} class represents an
6
6
  # object value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
8
8
  # can act as constraints.
9
9
  class Object < Leaf
10
10
  def initialize(properties)
@@ -1,16 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- class JSON::Schema
3
+ class LLM::Schema
4
4
  ##
5
- # The {JSON::Schema::String JSON::Schema::String} class represents a
5
+ # The {LLM::Schema::String LLM::Schema::String} class represents a
6
6
  # string value in a JSON schema. It is a subclass of
7
- # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
7
+ # {LLM::Schema::Leaf LLM::Schema::Leaf} and provides methods that
8
8
  # can act as constraints.
9
9
  class String < Leaf
10
10
  ##
11
11
  # Constrain the string to a minimum length
12
12
  # @param [Integer] i The minimum length
13
- # @return [JSON::Schema::String] Returns self
13
+ # @return [LLM::Schema::String] Returns self
14
14
  def min(i)
15
15
  tap { @minimum = i }
16
16
  end
@@ -18,7 +18,7 @@ class JSON::Schema
18
18
  ##
19
19
  # Constrain the string to a maximum length
20
20
  # @param [Integer] i The maximum length
21
- # @return [JSON::Schema::String] Returns self
21
+ # @return [LLM::Schema::String] Returns self
22
22
  def max(i)
23
23
  tap { @maximum = i }
24
24
  end
@@ -3,6 +3,6 @@
3
3
  module JSON
4
4
  end unless defined?(JSON)
5
5
 
6
- class JSON::Schema
6
+ class LLM::Schema
7
7
  VERSION = "0.1.0"
8
8
  end
@@ -1,10 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- module JSON
4
- end unless defined?(JSON)
5
-
6
3
  ##
7
- # The {JSON::Schema JSON::Schema} class represents a JSON schema,
4
+ # The {LLM::Schema LLM::Schema} class represents a JSON schema,
8
5
  # and provides methods that let you describe and produce a schema
9
6
  # that can be used in various contexts that include the validation
10
7
  # and generation of JSON data.
@@ -13,14 +10,14 @@ end unless defined?(JSON)
13
10
  # @see https://tour.json-schema.org/ JSON Schema Tour
14
11
  #
15
12
  # @example
16
- # schema = JSON::Schema.new
13
+ # schema = LLM::Schema.new
17
14
  # schema.object({
18
15
  # name: schema.string.enum("John", "Jane").required,
19
16
  # age: schema.integer.required,
20
17
  # hobbies: schema.array(schema.string, schema.null).required,
21
18
  # address: schema.object({street: schema.string}).required,
22
19
  # })
23
- class JSON::Schema
20
+ class LLM::Schema
24
21
  require_relative "schema/version"
25
22
  require_relative "schema/leaf"
26
23
  require_relative "schema/object"
@@ -34,7 +31,7 @@ class JSON::Schema
34
31
  ##
35
32
  # Returns an object
36
33
  # @param [Hash] properties A hash of properties
37
- # @return [JSON::Schema::Object]
34
+ # @return [LLM::Schema::Object]
38
35
  def object(properties)
39
36
  Object.new(properties)
40
37
  end
@@ -42,42 +39,42 @@ class JSON::Schema
42
39
  ##
43
40
  # Returns an array
44
41
  # @param [Array] items An array of items
45
- # @return [JSON::Schema::Array]
42
+ # @return [LLM::Schema::Array]
46
43
  def array(*items)
47
44
  Array.new(*items)
48
45
  end
49
46
 
50
47
  ##
51
48
  # Returns a string
52
- # @return [JSON::Schema::String]
49
+ # @return [LLM::Schema::String]
53
50
  def string
54
51
  String.new
55
52
  end
56
53
 
57
54
  ##
58
55
  # Returns a number
59
- # @return [JSON::Schema::Number] a number
56
+ # @return [LLM::Schema::Number] a number
60
57
  def number
61
58
  Number.new
62
59
  end
63
60
 
64
61
  ##
65
62
  # Returns an integer
66
- # @return [JSON::Schema::Integer]
63
+ # @return [LLM::Schema::Integer]
67
64
  def integer
68
65
  Integer.new
69
66
  end
70
67
 
71
68
  ##
72
69
  # Returns a boolean
73
- # @return [JSON::Schema::Boolean]
70
+ # @return [LLM::Schema::Boolean]
74
71
  def boolean
75
72
  Boolean.new
76
73
  end
77
74
 
78
75
  ##
79
76
  # Returns null
80
- # @return [JSON::Schema::Null]
77
+ # @return [LLM::Schema::Null]
81
78
  def null
82
79
  Null.new
83
80
  end
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "0.12.0"
4
+ VERSION = "0.14.0"
5
5
  end