omniai 2.5.0 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 89f3eb0b1c62e35bc437833ad64c2c48504ead7bab049b1fc13cbdde30f5c6fe
4
- data.tar.gz: bd8f3e7692996e622e245bd82f4fccc86df703836062d9542d59ec8119e09fe7
3
+ metadata.gz: 488be723c0a59ca0095eb3a1f64081cceff14ae982208e598da5058ee0ea0c4b
4
+ data.tar.gz: 47805f82344429912eaa2e0f00299cc95868d35d04a6d424f698b719bf9d0fe8
5
5
  SHA512:
6
- metadata.gz: eb4748a2dc375e1e36dc096605adacd9822e9f7217ffb5a5c0516bbd21f18228dfdcc329346308ccc9b7a114bc24ee2a5abf04e3c09e06ee50cceac72f8f6d09
7
- data.tar.gz: 6c814845c8fed1e20a042abe94c9ba74227f671a00b9551c79b1b12faedac2a7a998e9a325c942e8b07bdc1ebe55f62ef13563eb982b07ef27b9046695b00295
6
+ metadata.gz: 1c0d66e74c7a0e1eb0296e371b0339d44cedbb7a747c2beeb7cb0547303140d0a6550fd8d7fc6629c09804c57cfcf780dec38f01f6c8aac49004ab28e9968872
7
+ data.tar.gz: aeb129f452a6ea0a9c2e33a25afbe5f8be1e7b27445fb58399556e7bdd0475b0840b36465c6b33edbdf857c6d943c10824b0cc21b7e6a917d161829e4ca033a6
data/README.md CHANGED
@@ -88,40 +88,70 @@ require 'omniai/google'
88
88
 
89
89
  client = OmniAI::Google::Client.new
90
90
 
91
- class Weather < OmniAI::Tool
92
- description "Lookup the weather for a location"
91
+ class WeatherTool < OmniAI::Tool
92
+ description "Lookup the weather for a lat / lng."
93
93
 
94
- parameter :location, :string, description: "A location (e.g. 'Toronto, Canada')."
94
+ parameter :lat, :number, description: "The latitude of the location."
95
+ parameter :lng, :number, description: "The longitude of the location."
95
96
  parameter :unit, :string, enum: %w[Celsius Fahrenheit], description: "The unit of measurement."
97
+ required %i[lat lng]
98
+
99
+ # @param lat [Float]
100
+ # @param lng [Float]
101
+ # @param unit [String] "Celsius" or "Fahrenheit"
102
+ #
103
+ # @return [String] e.g. "20° Celsius at lat=43.7 lng=-79.4"
104
+ def execute(lat:, lng:, unit: "Celsius")
105
+ puts "[weather] lat=#{lat} lng=#{lng} unit=#{unit}"
106
+ "#{rand(20..50)}° #{unit} at lat=#{lat} lng=#{lng}"
107
+ end
108
+ end
109
+
110
+ class GeocodeTool < OmniAI::Tool
111
+ description "Lookup the latitude and longitude of a location."
112
+
113
+ parameter :location, :string, description: "The location to geocode."
96
114
  required %i[location]
97
115
 
98
- # @param location [String] required
99
- # @param unit [String] optional - "Celsius" or "Fahrenheit"
100
- # @return [String]
101
- def execute(location:, unit: "Celsius")
102
- puts "[weather] location=#{location} unit=#{unit}"
103
- "#{rand(20..50)}° #{unit} at #{location}"
116
+ # @param location [String] "Toronto, Canada"
117
+ #
118
+ # @return [Hash] { lat: Float, lng: Float, location: String }
119
+ def execute(location:)
120
+ puts "[geocode] location=#{location}"
121
+
122
+ {
123
+ lat: rand(-90.0..+90.0),
124
+ lng: rand(-180.0..+180.0),
125
+ location:,
126
+ }
104
127
  end
105
128
  end
106
129
 
107
- client.chat(stream: $stdout, tools: [Weather.new]) do |prompt|
130
+ tools = [
131
+ WeatherTool.new,
132
+ GeocodeTool.new,
133
+ ]
134
+
135
+ client.chat(stream: $stdout, tools:) do |prompt|
108
136
  prompt.system "You are an expert in weather."
109
137
  prompt.user 'What is the weather in "London" in Celsius and "Madrid" in Fahrenheit?'
110
138
  end
111
139
  ```
112
140
 
113
141
  ```
114
- [weather] location=London unit=Celsius
115
- [weather] location=Madrid unit=Fahrenheit
116
- ```
142
+ [geocode] location=London
143
+ [weather] lat=... lng=... unit=Celsius
144
+ [geocode] location=Madrid
145
+ [weather] lat=... lng=... unit=Fahrenheit
117
146
 
118
- ```
119
147
  The weather is 24° Celsius in London and 42° Fahrenheit in Madrid.
120
148
  ```
121
149
 
150
+ _For a set of pre-built tools for interacting with browsers, databases, docker, and more, try the [OmniAI::Tools](https://omniai-tools.ksylvest.com/) project._
151
+
122
152
  ### Example #5: [Chat w/ History](https://github.com/ksylvest/omniai/blob/main/examples/chat_with_history)
123
153
 
124
- Tracking a prompt history over multiple user and assistant messages is especially helpful when building an agent like conversation experience. A prompt can be used to track this back and forth conversation:
154
+ Tracking a prompt history over multiple user and assistant messages is especially helpful when building an agent like conversation experience. A prompt can be used to track this back-and-forth conversation:
125
155
 
126
156
  ```ruby
127
157
  require "omniai/openai"
@@ -146,19 +176,59 @@ loop do
146
176
  end
147
177
  ```
148
178
 
149
- ```
150
- Type 'exit' or 'quit' to leave.
179
+ ### Example #6 [Chat w/ Schema](https://github.com/ksylvest/omniai/blob/main/examples/chat_with_schema)
151
180
 
152
- > What is the capital of France?
153
- The capital of France is Paris.
154
- La capitale de la France est Paris.
181
+ Requesting structured data back from an LLM is possible by defining a schema, then passing the schema into the chat. The following example defines a structured schema using `OmniAI::Schema` to model a `Contact`. The results of the LLM call are then parsed using the schema to ensure all types are correct.
155
182
 
156
- > How many people live there?
157
- The population of Paris is approximately 2.1 million.
158
- La population de Paris est d’environ 2,1 million.
183
+ ```ruby
184
+ format = OmniAI::Schema.format(name: "Contact", schema: OmniAI::Schema.object(
185
+ description: "A contact with a name, relationship, and addresses.",
186
+ properties: {
187
+ name: OmniAI::Schema.string,
188
+ relationship: OmniAI::Schema.string(enum: %w[friend family]),
189
+ addresses: OmniAI::Schema.array(
190
+ items: OmniAI::Schema.object(
191
+ title: "Address",
192
+ description: "An address with street, city, state, and zip code.",
193
+ properties: {
194
+ street: OmniAI::Schema.string,
195
+ city: OmniAI::Schema.string,
196
+ state: OmniAI::Schema.string,
197
+ zip: OmniAI::Schema.string,
198
+ },
199
+ required: %i[street city state zip]
200
+ )
201
+ ),
202
+ },
203
+ required: %i[name]
204
+ ))
205
+
206
+ response = client.chat(format:) do |prompt|
207
+ prompt.user <<~TEXT
208
+ Parse the following contact:
209
+
210
+ NAME: George Harrison
211
+ RELATIONSHIP: friend
212
+ HOME: 123 Main St, Springfield, IL, 12345
213
+ WORK: 456 Elm St, Springfield, IL, 12345
214
+ TEXT
215
+ end
216
+
217
+ puts format.parse(response.text)
159
218
  ```
160
219
 
161
- ### Example #6: [Chat w/ CLI](https://github.com/ksylvest/omniai/blob/main/examples/chat_with_cli)
220
+ ```
221
+ {
222
+ name: "George Harrison",
223
+ relationship: "friend",
224
+ addresses: [
225
+ { street: "123 Main St", city: "Springfield", state: "IL", zip: "12345" },
226
+ { street: "456 Elm St", city: "Springfield", state: "IL", zip: "12345" },
227
+ ]
228
+ }
229
+ ```
230
+
231
+ ### Example #7: [Chat w/ CLI](https://github.com/ksylvest/omniai/blob/main/examples/chat_with_cli)
162
232
 
163
233
  The `OmniAI` gem also ships with a CLI to simplify quick tests.
164
234
 
@@ -178,7 +248,7 @@ omniai chat --provider="google" --model="gemini-2.0-flash" "Who are you?"
178
248
  I am a large language model, trained by Google.
179
249
  ```
180
250
 
181
- ### Example #7: [Text-to-Speech](https://github.com/ksylvest/omniai/blob/main/examples/text_to_speech)
251
+ ### Example #8: [Text-to-Speech](https://github.com/ksylvest/omniai/blob/main/examples/text_to_speech)
182
252
 
183
253
  This example demonstrates using `OmniAI` with **OpenAI** to convert text to speech and save it to a file.
184
254
 
@@ -194,7 +264,7 @@ File.open(File.join(__dir__, 'audio.wav'), 'wb') do |file|
194
264
  end
195
265
  ```
196
266
 
197
- ### Example #8: [Speech-to-Text](https://github.com/ksylvest/omniai/blob/main/examples/speech_to_text)
267
+ ### Example #9: [Speech-to-Text](https://github.com/ksylvest/omniai/blob/main/examples/speech_to_text)
198
268
 
199
269
  This example demonstrates using `OmniAI` with **OpenAI** to convert speech to text.
200
270
 
@@ -209,7 +279,7 @@ File.open(File.join(__dir__, 'audio.wav'), 'rb') do |file|
209
279
  end
210
280
  ```
211
281
 
212
- ### Example #9: [Embeddings](https://github.com/ksylvest/omniai/blob/main/examples/embeddings)
282
+ ### Example #10: [Embeddings](https://github.com/ksylvest/omniai/blob/main/examples/embeddings)
213
283
 
214
284
  This example demonstrates using `OmniAI` with **Mistral** to generate embeddings for a dataset. It defines a set of entries (e.g. "George is a teacher." or "Ringo is a doctor.") and then compares the embeddings generated from a query (e.g. "What does George do?" or "Who is a doctor?") to rank the entries by relevance.
215
285
 
@@ -355,10 +425,10 @@ require 'omniai/openai'
355
425
  require 'logger'
356
426
 
357
427
  logger = Logger.new(STDOUT)
358
- client = OmniAI::OpenAI::Client.new(timeout: 8) # i.e. 8 seconds
428
+ client = OmniAI::OpenAI::Client.new(timeout: 8) # 8 seconds
359
429
  ```
360
430
 
361
- Timeouts are also configurable by passing a `timeout` hash with `timeout` / `read` / `write` / keys using:
431
+ Timeouts are also configurable by passing a `timeout` hash with `timeout` / `read` / `write` keys using:
362
432
 
363
433
  ```ruby
364
434
  require 'omniai/openai'
@@ -415,7 +485,7 @@ client.chat('Tell me a joke.', stream:)
415
485
 
416
486
  #### Completion using Streaming via IO
417
487
 
418
- The above code can also be supplied any IO (e.g. `File`, `$stdout`, `$stdin`, etc):
488
+ The above code can also be supplied with any IO object (e.g., `File`, `$stdout`, `$stdin`, etc.):
419
489
 
420
490
  ```ruby
421
491
  client.chat('Tell me a story', stream: $stdout)
@@ -423,22 +493,31 @@ client.chat('Tell me a story', stream: $stdout)
423
493
 
424
494
  #### Completion with Tools
425
495
 
426
- A chat can also be initialized with tools:
496
+ A chat can also be initialized using tools:
427
497
 
428
498
  ```ruby
429
- tool = OmniAI::Tool.new(
430
- proc { |location:, unit: 'Celsius'| "#{rand(20..50)}° #{unit} in #{location}" },
431
- name: 'Weather',
432
- description: 'Lookup the weather in a location',
433
- parameters: OmniAI::Tool::Parameters.new(
434
- properties: {
435
- location: OmniAI::Tool::Property.string(description: 'e.g. Toronto'),
436
- unit: OmniAI::Tool::Property.string(enum: %w[Celsius Fahrenheit]),
437
- },
438
- required: %i[location]
439
- )
440
- )
441
- client.chat('What is the weather in "London" in Celsius and "Paris" in Fahrenheit?', tools: [tool])
499
+ class WeatherTool < OmniAI::Tool
500
+ description "Lookup the weather at a location in either Celsius or Fahrenheit."
501
+
502
+ parameter :location, :string, description: "The location to find the weather."
503
+ parameter :unit, :string, enum: %w[Celsius Fahrenheit], description: "The unit of measurement."
504
+ required %i[location]
505
+
506
+ # @param location [String]
507
+ # @param unit [String] "Celsius" or "Fahrenheit"
508
+ #
509
+ # @return [Hash]
510
+ def execute(location:, unit: "Celsius")
511
+ puts "[weather] location=#{location} unit=#{unit}"
512
+
513
+ {
514
+ temperature: "#{rand(20..50)}°",
515
+ humidity: rand(0..100),
516
+ }
517
+ end
518
+ end
519
+
520
+ client.chat('What is the weather in "London" in Celsius and "Paris" in Fahrenheit?', tools: [WeatherTool.new])
442
521
  ```
443
522
 
444
523
  ### Transcribe
@@ -463,7 +542,7 @@ end
463
542
 
464
543
  ### Speak
465
544
 
466
- Clients that support speak (e.g. OpenAI w/ "Whisper") convert text to recordings via the following calls:
545
+ Clients that support speak (e.g. OpenAI w/ "Whisper") convert text to speech via the following calls:
467
546
 
468
547
  #### Speech with Stream
469
548
 
@@ -493,12 +572,12 @@ response.usage # <OmniAI::Embed::Usage prompt_tokens=5 total_tokens=5>
493
572
  response.embedding # [0.1, 0.2, ...] >
494
573
  ```
495
574
 
496
- Batches of text can also be converted to embeddings via the following:
575
+ These APIs support generating embeddings in batches using the following code:
497
576
 
498
577
  ```ruby
499
578
  response = client.embed([
500
- '',
501
- '',
579
+ 'The quick brown fox jumps over a lazy dog',
580
+ 'Pack my box with five dozen liquor jugs',
502
581
  ])
503
582
  response.usage # <OmniAI::Embed::Usage prompt_tokens=5 total_tokens=5>
504
583
  response.embeddings.each do |embedding|
@@ -508,7 +587,7 @@ end
508
587
 
509
588
  ## CLI
510
589
 
511
- OmniAI packages a basic command line interface (CLI) to allow for exploration of various APIs. A detailed CLI documentation can be found via help:
590
+ OmniAI packages a basic command line interface (CLI) to allow for exploration of various APIs. CLI documentation is available with the `--help` flag:
512
591
 
513
592
  ```bash
514
593
  omniai --help
@@ -103,7 +103,13 @@ module OmniAI
103
103
  serializer = context&.serializer(:message)
104
104
  return serializer.call(self, context:) if serializer
105
105
 
106
- content = @content.is_a?(Array) ? @content.map { |content| content.serialize(context:) } : @content
106
+ content =
107
+ case @content
108
+ when Array then @content.map { |content| content.serialize(context:) }
109
+ when Content then @content.serialize(context:)
110
+ else @content
111
+ end
112
+
107
113
  tool_calls = @tool_call_list&.serialize(context:)
108
114
 
109
115
  { role: @role, content:, tool_calls: }.compact
@@ -29,6 +29,8 @@ module OmniAI
29
29
 
30
30
  # @param data [Hash]
31
31
  # @param context [OmniAI::Context] optional
32
+ #
33
+ # @return [OmniAI::Chat::Response]
32
34
  def self.deserialize(data, context: nil)
33
35
  deserialize = context&.deserializer(:response)
34
36
  return deserialize.call(data, context:) if deserialize
data/lib/omniai/chat.rb CHANGED
@@ -65,7 +65,7 @@ module OmniAI
65
65
  # @param temperature [Float, nil] optional
66
66
  # @param stream [Proc, IO, nil] optional
67
67
  # @param tools [Array<OmniAI::Tool>] optional
68
- # @param format [Symbol, nil] optional - :json
68
+ # @param format [:json, :text, OmniAI::Schema::Object, nil] optional
69
69
  #
70
70
  # @yield [prompt] optional
71
71
  # @yieldparam prompt [OmniAI::Chat::Prompt]
data/lib/omniai/client.rb CHANGED
@@ -34,69 +34,93 @@ module OmniAI
34
34
 
35
35
  # Initialize a client for Anthropic. This method requires the provider if it is undefined.
36
36
  #
37
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
37
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
38
38
  #
39
39
  # @return [Class<OmniAI::Client>]
40
40
  def self.anthropic
41
41
  require "omniai/anthropic" unless defined?(OmniAI::Anthropic::Client)
42
42
  OmniAI::Anthropic::Client
43
43
  rescue ::LoadError
44
- raise Error, "requires 'omniai-anthropic': `gem install omniai-anthropic`"
44
+ raise LoadError, "requires 'omniai-anthropic': `gem install omniai-anthropic`"
45
45
  end
46
46
 
47
47
  # Initialize a client for DeepSeek. This method requires the provider if it is undefined.
48
48
  #
49
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
49
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
50
50
  #
51
51
  # @return [Class<OmniAI::Client>]
52
52
  def self.deepseek
53
53
  require "omniai/deepseek" unless defined?(OmniAI::DeepSeek::Client)
54
54
  OmniAI::DeepSeek::Client
55
55
  rescue ::LoadError
56
- raise Error, "requires 'omniai-deepseek': `gem install omniai-deepseek`"
56
+ raise LoadError, "requires 'omniai-deepseek': `gem install omniai-deepseek`"
57
57
  end
58
58
 
59
59
  # Lookup the `OmniAI::Google::Client`. This method requires the provider if it is undefined.
60
60
  #
61
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
61
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
62
62
  #
63
63
  # @return [Class<OmniAI::Client>]
64
64
  def self.google
65
65
  require "omniai/google" unless defined?(OmniAI::Google::Client)
66
66
  OmniAI::Google::Client
67
67
  rescue ::LoadError
68
- raise Error, "requires 'omniai-google': `gem install omniai-google`"
68
+ raise LoadError, "requires 'omniai-google': `gem install omniai-google`"
69
69
  end
70
70
 
71
71
  # Initialize a client for Mistral. This method requires the provider if it is undefined.
72
72
  #
73
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
73
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
74
74
  #
75
75
  # @return [Class<OmniAI::Client>]
76
76
  def self.mistral
77
77
  require "omniai/mistral" unless defined?(OmniAI::Mistral::Client)
78
78
  OmniAI::Mistral::Client
79
79
  rescue ::LoadError
80
- raise Error, "requires 'omniai-mistral': `gem install omniai-mistral`"
80
+ raise LoadError, "requires 'omniai-mistral': `gem install omniai-mistral`"
81
81
  end
82
82
 
83
83
  # Initialize a client for OpenAI. This method requires the provider if it is undefined.
84
84
  #
85
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
85
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
86
86
  #
87
87
  # @return [Class<OmniAI::Client>]
88
88
  def self.openai
89
89
  require "omniai/openai" unless defined?(OmniAI::OpenAI::Client)
90
90
  OmniAI::OpenAI::Client
91
91
  rescue ::LoadError
92
- raise Error, "requires 'omniai-openai': `gem install omniai-openai`"
92
+ raise LoadError, "requires 'omniai-openai': `gem install omniai-openai`"
93
+ end
94
+
95
+ # Discover a client by provider ('openai' then 'anthropic' then 'google' then 'mistral' then 'deepseek').
96
+ #
97
+ # @raise [OmniAI::LoadError] if no providers are installed
98
+ #
99
+ # @return [OmniAI::Client]
100
+ def self.discover(**)
101
+ %i[openai anthropic google mistral deepseek].each do |provider|
102
+ return find(provider:, **)
103
+ rescue LoadError
104
+ next
105
+ end
106
+
107
+ raise LoadError, <<~TEXT
108
+ requires 'omniai-openai' or 'omniai-anthropic' or 'omniai-deepseek' or 'omniai-google' or 'omniai-mistral':
109
+
110
+ `gem install omniai-openai`
111
+ `gem install omniai-anthropic`
112
+ `gem install omniai-deepseek`
113
+ `gem install omniai-google`
114
+ `gem install omniai-mistral`
115
+ TEXT
93
116
  end
94
117
 
95
118
  # Initialize a client by provider (e.g. 'openai'). This method attempts to require the provider.
96
119
  #
97
- # @param provider [String, Symbol] required (e.g. 'anthropic', 'deepsek', 'google', 'mistral', 'openai', etc)
98
120
  #
99
- # @raise [OmniAI::Error] if the provider is not defined and the gem is not installed
121
+ # @param provider [String, Symbol] required (e.g. 'anthropic', 'deepseek', 'google', 'mistral', 'openai', etc)
122
+ #
123
+ # @raise [OmniAI::LoadError] if the provider is not defined and the gem is not installed
100
124
  #
101
125
  # @return [OmniAI::Client]
102
126
  def self.find(provider:, **)
@@ -0,0 +1,7 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ # An error that wraps a ::LoadError for a missing library.
5
+ class LoadError < Error
6
+ end
7
+ end
@@ -0,0 +1,96 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module Schema
5
+ # @example
6
+ # array = OmniAI::Schema::Array.deserialize({
7
+ # description: "A list of people.",
8
+ # items: {
9
+ # properties: {
10
+ # name: { type: "string" },
11
+ # },
12
+ # required: ["name"],
13
+ # },
14
+ # min_items: 1,
15
+ # max_items: 5,
16
+ # })
17
+ # array.serialize # => { type: "array", items: { ... }, minItems: 1, maxItems: 5 }
18
+ # array.parse([{ "name" => "Ringo Starr" }]) # => [{ name: "Ringo Starr" }]
19
+ class Array
20
+ TYPE = "array"
21
+
22
+ # @!attribute [rw] items
23
+ # @return [OmniAI::Schema::Object, OmniAI::Schema::Array, OmniAI::Schema::Scalar]
24
+ attr_accessor :items
25
+
26
+ # @!attribute [rw] max_items
27
+ # @return [Integer, nil]
28
+ attr_accessor :max_items
29
+
30
+ # @!attribute [rw] min_items
31
+ # @return [Integer, nil]
32
+ attr_accessor :min_items
33
+
34
+ # @!attribute [rw] description
35
+ # @return [String, nil]
36
+ attr_accessor :description
37
+
38
+ # @example
39
+ # array = OmniAI::Schema::Array.deserialize({
40
+ # type: "array",
41
+ # items: { type: "string" },
42
+ # minItems: 1,
43
+ # maxItems: 5,
44
+ # description: "A list of strings."
45
+ # }) # => OmniAI::Schema::Array
46
+ #
47
+ # @param data [Hash]
48
+ #
49
+ # @return [OmniAI::Schema::Array]
50
+ def self.deserialize(data)
51
+ new(
52
+ items: OmniAI::Schema.deserialize(data["items"] || data[:items]),
53
+ max_items: data[:maxItems] || data["maxItems"],
54
+ min_items: data[:minItems] || data["minItems"],
55
+ description: data[:description] || data["description"]
56
+ )
57
+ end
58
+
59
+ # @param items [OmniAI::Schema::Object, OmniAI::Schema::Array, OmniAI::Schema::Scalar] required
60
+ # @param min_items [Integer] optional
61
+ # @param max_items [Integer] optional
62
+ # @param description [String] optional
63
+ def initialize(items:, min_items: nil, max_items: nil, description: nil)
64
+ super()
65
+ @items = items
66
+ @min_items = min_items
67
+ @max_items = max_items
68
+ @description = description
69
+ end
70
+
71
+ # @example
72
+ # array.serialize # => { type: "array", items: { type: "string" } }
73
+ #
74
+ # @return [Hash]
75
+ def serialize
76
+ {
77
+ type: TYPE,
78
+ description: @description,
79
+ items: @items.serialize,
80
+ maxItems: @max_items,
81
+ minItems: @min_items,
82
+ }.compact
83
+ end
84
+
85
+ # @example
86
+ # array.parse(["1", "2", "3"]) # => [1, 2, 3]
87
+ #
88
+ # @param data [Array]
89
+ #
90
+ # @return [Array]
91
+ def parse(data)
92
+ data.map { |arg| @items.parse(arg) }
93
+ end
94
+ end
95
+ end
96
+ end
@@ -0,0 +1,71 @@
1
+ # frozen_string_literal: true
2
+
3
+ module OmniAI
4
+ module Schema
5
+ # @example
6
+ # format = OmniAI::Schema::Format.deserialize({
7
+ # name: "example",
8
+ # schema: {
9
+ # type: "object",
10
+ # properties: {
11
+ # name: { type: "string" },
12
+ # },
13
+ # required: ["name"],
14
+ # }
15
+ # })
16
+ # format.serialize # => { name: "example", schema: { ... } }
17
+ class Format
18
+ # @!attribute [rw] name
19
+ # @return [String]
20
+ attr_accessor :name
21
+
22
+ # @!attribute [rw] schema
23
+ # @return [OmniAI::Schema::Object]
24
+ attr_accessor :schema
25
+
26
+ # @example
27
+ # array = OmniAI::Schema::Format.deserialize({
28
+ # name: "Contact",
29
+ # schema: { ... },
30
+ # }) # => OmniAI::Schema::Format
31
+ #
32
+ # @param data [Hash]
33
+ #
34
+ # @return [OmniAI::Schema::Format]
35
+ def self.deserialize(data)
36
+ name = data["name"] || data[:name]
37
+ schema = OmniAI::Schema.deserialize(data["schema"] || data[:schema])
38
+
39
+ new(name:, schema:)
40
+ end
41
+
42
+ # @param name [String]
43
+ # @param schema [OmniAI::Schema::Object]
44
+ def initialize(name:, schema:)
45
+ @name = name
46
+ @schema = schema
47
+ end
48
+
49
+ # @example
50
+ # format.serialize # => { name: "...", schema: { ... } }
51
+ #
52
+ # @return [Hash]
53
+ def serialize
54
+ {
55
+ name:,
56
+ schema: schema.serialize,
57
+ }
58
+ end
59
+
60
+ # @example
61
+ # format.parse('{ "name": "Ringo Starr" }') # => { name: "Ringo Starr" }
62
+ #
63
+ # @param text [String]
64
+ #
65
+ # @return [Hash]
66
+ def parse(text)
67
+ schema.parse(JSON.parse(text))
68
+ end
69
+ end
70
+ end
71
+ end