llm.rb 0.4.2 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +132 -84
  3. data/lib/json/schema/array.rb +5 -0
  4. data/lib/json/schema/boolean.rb +4 -0
  5. data/lib/json/schema/integer.rb +23 -1
  6. data/lib/json/schema/leaf.rb +11 -0
  7. data/lib/json/schema/null.rb +4 -0
  8. data/lib/json/schema/number.rb +23 -1
  9. data/lib/json/schema/object.rb +6 -2
  10. data/lib/json/schema/string.rb +26 -1
  11. data/lib/json/schema/version.rb +2 -0
  12. data/lib/json/schema.rb +10 -10
  13. data/lib/llm/buffer.rb +28 -10
  14. data/lib/llm/chat.rb +26 -1
  15. data/lib/llm/core_ext/ostruct.rb +14 -8
  16. data/lib/llm/file.rb +6 -1
  17. data/lib/llm/function.rb +81 -0
  18. data/lib/llm/message.rb +46 -1
  19. data/lib/llm/providers/anthropic/format/completion_format.rb +73 -0
  20. data/lib/llm/providers/anthropic/format.rb +7 -33
  21. data/lib/llm/providers/anthropic/response_parser/completion_parser.rb +51 -0
  22. data/lib/llm/providers/anthropic/response_parser.rb +1 -9
  23. data/lib/llm/providers/anthropic.rb +4 -3
  24. data/lib/llm/providers/gemini/audio.rb +4 -4
  25. data/lib/llm/providers/gemini/files.rb +5 -4
  26. data/lib/llm/providers/gemini/format/completion_format.rb +54 -0
  27. data/lib/llm/providers/gemini/format.rb +28 -27
  28. data/lib/llm/providers/gemini/images.rb +9 -4
  29. data/lib/llm/providers/gemini/response_parser/completion_parser.rb +46 -0
  30. data/lib/llm/providers/gemini/response_parser.rb +13 -20
  31. data/lib/llm/providers/gemini.rb +3 -12
  32. data/lib/llm/providers/ollama/format/completion_format.rb +72 -0
  33. data/lib/llm/providers/ollama/format.rb +10 -30
  34. data/lib/llm/providers/ollama/response_parser/completion_parser.rb +42 -0
  35. data/lib/llm/providers/ollama/response_parser.rb +8 -11
  36. data/lib/llm/providers/ollama.rb +3 -11
  37. data/lib/llm/providers/openai/audio.rb +6 -6
  38. data/lib/llm/providers/openai/files.rb +3 -3
  39. data/lib/llm/providers/openai/format/completion_format.rb +81 -0
  40. data/lib/llm/providers/openai/format/respond_format.rb +69 -0
  41. data/lib/llm/providers/openai/format.rb +25 -58
  42. data/lib/llm/providers/openai/images.rb +4 -2
  43. data/lib/llm/providers/openai/response_parser/completion_parser.rb +55 -0
  44. data/lib/llm/providers/openai/response_parser/respond_parser.rb +56 -0
  45. data/lib/llm/providers/openai/response_parser.rb +8 -44
  46. data/lib/llm/providers/openai/responses.rb +10 -11
  47. data/lib/llm/providers/openai.rb +5 -16
  48. data/lib/llm/response/{output.rb → respond.rb} +2 -2
  49. data/lib/llm/response.rb +1 -1
  50. data/lib/llm/version.rb +1 -1
  51. data/lib/llm.rb +28 -0
  52. data/llm.gemspec +1 -0
  53. metadata +28 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 2ae98573410bfc74a61f0bf15f811c30800b6567c9e5fc9e4cbe50f4994a200f
4
- data.tar.gz: 9776839912017f5f3bac8452670c60b3bc9eabecebe11c402458a36b78d958e7
3
+ metadata.gz: 50bc0de3950bfbc8dd1a9ff3b211915fff5ae510f6fd0f0fc277e5cbf689e5ea
4
+ data.tar.gz: 3e48e23abefeb652429cb145be5fa1166e78cb24f3ddfb7440daff2e4a461c5d
5
5
  SHA512:
6
- metadata.gz: 4252d2d861d409428067415340c2d36f2e75f34f3be9905a576a63455902d1faf4100d8c8c7060e683fe922ff095cc00fd56fd135b9589a746d9de716c66e5e5
7
- data.tar.gz: 819676961e401aa5e27678e3d36af406362f0a68b0243e1e1f71f5f135286830dd1c009c47c69c26b6789a771da6a1fe72eb023f0f1833c20ffadac37f58ed1b
6
+ metadata.gz: 8b6ad1955541753ba991a0f9b55e261cb4cc32a3a85470cee7312359cafce7d4689757a2f5efe2f641c9813dca5b7722b47ff8dc12d90f8a600eff1e3c7b5b4d
7
+ data.tar.gz: b807543c44e12d4a251370a84cda59714a9b62cf3bf031203d723a8740167aab5a4f9348ae29856b6324384f2042d1851c05c272ba3a6510b33d99d6fddf4fdc
data/README.md CHANGED
@@ -1,11 +1,31 @@
1
1
  ## About
2
2
 
3
- llm.rb is a lightweight library that provides a common interface
4
- and set of functionality for multiple Large Language Models (LLMs). It
5
- is designed to be simple, flexible, and easy to use – and it has been
6
- implemented with zero dependencies outside Ruby's standard library. See the
7
- [philosophy](#philosophy) section for more information on the design principles
8
- behind llm.rb.
3
+ llm.rb is a zero-dependency Ruby toolkit for Large Language Models like
4
+ OpenAI, Gemini, Anthropic, and more. It’s fast, clean, and composable
5
+ with full support for chat, tool calling, audio, images, files, and
6
+ JSON Schema generation.
7
+
8
+ ## Features
9
+
10
+ #### General
11
+ - ✅ Unified interface for OpenAI, Gemini, Anthropic, Ollama, and more
12
+ - 📦 Zero dependencies outside Ruby's standard library
13
+ - 🔌 Model introspection and selection
14
+ - 🚀 Optimized for performance and low memory usage
15
+
16
+ #### Chat, Agents
17
+ - 🧠 Stateless and stateful chat via completions and responses API
18
+ - 🤖 Tool calling and function execution
19
+ - 🗂️ JSON Schema support for structured, validated responses
20
+
21
+ #### Media
22
+ - 🗣️ Text-to-speech, transcription, and translation
23
+ - 🖼️ Image generation, editing, and variation support
24
+ - 📎 File uploads and prompt-aware file interaction
25
+ - 💡 Multimodal prompts (text, URLs, files)
26
+
27
+ #### Embeddings
28
+ - 🧮 Text embeddings and vector support
9
29
 
10
30
  ## Examples
11
31
 
@@ -150,6 +170,56 @@ bot.chat "Does the earth orbit the sun?", :user
150
170
  bot.messages.find(&:assistant?).content! # => {probability: 1}
151
171
  ```
152
172
 
173
+ ### Tools
174
+
175
+ #### Functions
176
+
177
+ The OpenAI, Anthropic, Gemini and Ollama providers support a powerful feature known as
178
+ tool calling, and although it is a little complex to understand at first,
179
+ it can be powerful for building agents. The following example demonstrates how we
180
+ can define a local function (which happens to be a tool), and OpenAI can
181
+ then detect when we should call the function.
182
+
183
+ The
184
+ [LLM::Chat#functions](https://0x1eef.github.io/x/llm.rb/LLM/Chat.html#functions-instance_method)
185
+ method returns an array of functions that can be called after sending a message and
186
+ it will only be populated if the LLM detects a function should be called. Each function
187
+ corresponds to an element in the "tools" array. The array is emptied after a function call,
188
+ and potentially repopulated on the next message.
189
+
190
+ The following example defines an agent that can run system commands based on natural language,
191
+ and it is only intended to be a fun demo of tool calling - it is not recommended to run
192
+ arbitrary commands from an LLM without sanitizing the input first :) Without further ado:
193
+
194
+ ```ruby
195
+ #!/usr/bin/env ruby
196
+ require "llm"
197
+
198
+ llm = LLM.openai(ENV["KEY"])
199
+ tool = LLM.function(:system) do |fn|
200
+ fn.description "Run a shell command"
201
+ fn.params do |schema|
202
+ schema.object(command: schema.string.required)
203
+ end
204
+ fn.define do |params|
205
+ system(params.command)
206
+ end
207
+ end
208
+
209
+ bot = LLM::Chat.new(llm, tools: [tool]).lazy
210
+ bot.chat "Your task is to run shell commands via a tool.", :system
211
+
212
+ bot.chat "What is the current date?", :user
213
+ bot.chat bot.functions.map(&:call) # report return value to the LLM
214
+
215
+ bot.chat "What operating system am I running? (short version please!)", :user
216
+ bot.chat bot.functions.map(&:call) # report return value to the LLM
217
+
218
+ ##
219
+ # Thu May 1 10:01:02 UTC 2025
220
+ # FreeBSD
221
+ ```
222
+
153
223
  ### Audio
154
224
 
155
225
  #### Speech
@@ -159,8 +229,7 @@ can create speech from text, transcribe audio to text, or translate
159
229
  audio to text (usually English). The following example uses the OpenAI provider
160
230
  to create an audio file from a text prompt. The audio is then moved to
161
231
  `${HOME}/hello.mp3` as the final step. As always, consult the provider's
162
- documentation (eg [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/create))
163
- for more information on how to use the audio generation API:
232
+ documentation for more information on how to use the audio generation API:
164
233
 
165
234
  ```ruby
166
235
  #!/usr/bin/env ruby
@@ -177,16 +246,7 @@ The following example transcribes an audio file to text. The audio file
177
246
  (`${HOME}/hello.mp3`) was theoretically created in the previous example,
178
247
  and the result is printed to the console. The example uses the OpenAI
179
248
  provider to transcribe the audio file. As always, consult the provider's
180
- documentation (eg
181
- [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/createTranscription),
182
- [Gemini docs](https://ai.google.dev/gemini-api/docs/audio))
183
- for more information on how to use the audio transcription API.
184
-
185
- Please also see provider-specific documentation for more provider-specific
186
- examples and documentation
187
- (eg
188
- [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html),
189
- [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)):
249
+ documentation for more information on how to use the audio transcription API:
190
250
 
191
251
  ```ruby
192
252
  #!/usr/bin/env ruby
@@ -194,7 +254,7 @@ require "llm"
194
254
 
195
255
  llm = LLM.openai(ENV["KEY"])
196
256
  res = llm.audio.create_transcription(
197
- file: LLM::File(File.join(Dir.home, "hello.mp3"))
257
+ file: File.join(Dir.home, "hello.mp3")
198
258
  )
199
259
  print res.text, "\n" # => "Hello world."
200
260
  ```
@@ -205,17 +265,8 @@ The following example translates an audio file to text. In this example
205
265
  the audio file (`${HOME}/bomdia.mp3`) is theoretically in Portuguese,
206
266
  and it is translated to English. The example uses the OpenAI provider,
207
267
  and at the time of writing, it can only translate to English. As always,
208
- consult the provider's documentation (eg
209
- [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/createTranslation),
210
- [Gemini docs](https://ai.google.dev/gemini-api/docs/audio))
211
- for more information on how to use the audio translation API.
212
-
213
- Please also see provider-specific documentation for more provider-specific
214
- examples and documentation
215
- (eg
216
- [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html),
217
- [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)):
218
-
268
+ consult the provider's documentation for more information on how to use
269
+ the audio translation API:
219
270
 
220
271
  ```ruby
221
272
  #!/usr/bin/env ruby
@@ -223,7 +274,7 @@ require "llm"
223
274
 
224
275
  llm = LLM.openai(ENV["KEY"])
225
276
  res = llm.audio.create_translation(
226
- file: LLM::File(File.join(Dir.home, "bomdia.mp3"))
277
+ file: File.join(Dir.home, "bomdia.mp3")
227
278
  )
228
279
  print res.text, "\n" # => "Good morning."
229
280
  ```
@@ -236,13 +287,7 @@ Some but not all LLM providers implement image generation capabilities that
236
287
  can create new images from a prompt, or edit an existing image with a
237
288
  prompt. The following example uses the OpenAI provider to create an
238
289
  image of a dog on a rocket to the moon. The image is then moved to
239
- `${HOME}/dogonrocket.png` as the final step.
240
-
241
- Please also see provider-specific documentation for more provider-specific
242
- examples and documentation
243
- (eg
244
- [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html),
245
- [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)):
290
+ `${HOME}/dogonrocket.png` as the final step:
246
291
 
247
292
  ```ruby
248
293
  #!/usr/bin/env ruby
@@ -266,17 +311,8 @@ now wearing a hat. The image is then moved to `${HOME}/catwithhat.png` as
266
311
  the final step.
267
312
 
268
313
  Results and quality may vary, consider prompt adjustments if the results
269
- are not satisfactory, and consult the provider's documentation
270
- (eg
271
- [OpenAI docs](https://platform.openai.com/docs/api-reference/images/createEdit),
272
- [Gemini docs](https://ai.google.dev/gemini-api/docs/image-generation))
273
- for more information on how to use the image editing API.
274
-
275
- Please also see provider-specific documentation for more provider-specific
276
- examples and documentation
277
- (eg
278
- [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html),
279
- [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)):
314
+ are not as expected, and consult the provider's documentation
315
+ for more information on how to use the image editing API:
280
316
 
281
317
  ```ruby
282
318
  #!/usr/bin/env ruby
@@ -286,7 +322,7 @@ require "fileutils"
286
322
 
287
323
  llm = LLM.openai(ENV["KEY"])
288
324
  res = llm.images.edit(
289
- image: LLM::File("/images/cat.png"),
325
+ image: "/images/cat.png",
290
326
  prompt: "a cat with a hat",
291
327
  )
292
328
  res.urls.each do |url|
@@ -300,9 +336,8 @@ end
300
336
  The following example is focused on creating variations of a local image.
301
337
  The image (`/images/cat.png`) is returned to us with five different variations.
302
338
  The images are then moved to `${HOME}/catvariation0.png`, `${HOME}/catvariation1.png`
303
- and so on as the final step. Consult the provider's documentation
304
- (eg [OpenAI docs](https://platform.openai.com/docs/api-reference/images/createVariation))
305
- for more information on how to use the image variations API:
339
+ and so on as the final step. Consult the provider's documentation for more information
340
+ on how to use the image variations API:
306
341
 
307
342
  ```ruby
308
343
  #!/usr/bin/env ruby
@@ -312,7 +347,7 @@ require "fileutils"
312
347
 
313
348
  llm = LLM.openai(ENV["KEY"])
314
349
  res = llm.images.create_variation(
315
- image: LLM::File("/images/cat.png"),
350
+ image: "/images/cat.png",
316
351
  n: 5
317
352
  )
318
353
  res.urls.each.with_index do |url, index|
@@ -331,13 +366,8 @@ for this feature. The following example uses the OpenAI provider to describe
331
366
  the contents of a PDF file after it has been uploaded. The file (an instance
332
367
  of [LLM::Response::File](https://0x1eef.github.io/x/llm.rb/LLM/Response/File.html))
333
368
  is passed directly to the chat method, and generally any object a prompt supports
334
- can be given to the chat method.
369
+ can be given to the chat method:
335
370
 
336
- Please also see provider-specific documentation for more provider-specific
337
- examples and documentation
338
- (eg
339
- [LLM::Gemini::Files](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Files.html),
340
- [LLM::OpenAI::Files](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Files.html)):
341
371
 
342
372
  ```ruby
343
373
  #!/usr/bin/env ruby
@@ -345,7 +375,7 @@ require "llm"
345
375
 
346
376
  llm = LLM.openai(ENV["KEY"])
347
377
  bot = LLM::Chat.new(llm).lazy
348
- file = llm.files.create(file: LLM::File("/documents/openbsd_is_awesome.pdf"))
378
+ file = llm.files.create(file: "/documents/openbsd_is_awesome.pdf")
349
379
  bot.chat(file)
350
380
  bot.chat("What is this file about?")
351
381
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
@@ -368,11 +398,7 @@ objects to describe links, `LLM::File` | `LLM::Response::File` objects
368
398
  to describe files, `String` objects to describe text blobs, or an array
369
399
  of the aforementioned objects to describe multiple objects in a single
370
400
  prompt. Each object is a first class citizen that can be passed directly
371
- to a prompt.
372
-
373
- For more depth and examples on how to use the multimodal API, please see
374
- the [provider-specific documentation](https://0x1eef.github.io/x/llm.rb/)
375
- for more provider-specific examples:
401
+ to a prompt:
376
402
 
377
403
  ```ruby
378
404
  #!/usr/bin/env ruby
@@ -381,19 +407,17 @@ require "llm"
381
407
  llm = LLM.openai(ENV["KEY"])
382
408
  bot = LLM::Chat.new(llm).lazy
383
409
 
384
- bot.chat URI("https://example.com/path/to/image.png")
385
- bot.chat "Describe the above image"
410
+ bot.chat [URI("https://example.com/path/to/image.png"), "Describe the image in the link"]
386
411
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
387
412
 
388
- file = bot.files.create(file: LLM::File("/documents/openbsd_is_awesome.pdf"))
389
- bot.chat file
390
- bot.chat "What is this file about?"
413
+ file = llm.files.create(file: "/documents/openbsd_is_awesome.pdf")
414
+ bot.chat [file, "What is this file about?"]
391
415
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
392
416
 
393
- bot.chat [LLM::File("/images/puffy.png"), "What is this image about?"]
417
+ bot.chat [LLM.File("/images/puffy.png"), "What is this image about?"]
394
418
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
395
419
 
396
- bot.chat [LLM::File("/images/beastie.png"), "What is this image about?"]
420
+ bot.chat [LLM.File("/images/beastie.png"), "What is this image about?"]
397
421
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
398
422
  ```
399
423
 
@@ -495,6 +519,26 @@ over or doesn't cover at all. The API reference is available at
495
519
  [0x1eef.github.io/x/llm.rb](https://0x1eef.github.io/x/llm.rb).
496
520
 
497
521
 
522
+ ### See also
523
+
524
+ #### Gemini
525
+
526
+ * [LLM::Gemini](https://0x1eef.github.io/x/llm.rb/LLM/Gemini.html)
527
+ * [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html)
528
+ * [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html)
529
+
530
+ #### OpenAI
531
+
532
+ * [LLM::OpenAI](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI.html)
533
+ * [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)
534
+ * [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)
535
+
536
+ #### Anthropic
537
+ * [LLM::Anthropic](https://0x1eef.github.io/x/llm.rb/LLM/Anthropic.html)
538
+
539
+ #### Ollama
540
+ * [LLM::Ollama](https://0x1eef.github.io/x/llm.rb/LLM/Ollama.html)
541
+
498
542
  ## Install
499
543
 
500
544
  llm.rb can be installed via rubygems.org:
@@ -503,17 +547,21 @@ llm.rb can be installed via rubygems.org:
503
547
 
504
548
  ## Philosophy
505
549
 
506
- llm.rb was built for developers who believe that simplicity can be challenging
507
- but it is always worth it. It provides a clean, dependency-free interface to
508
- Large Language Models, treating Ruby itself as the primary platform –
509
- not Rails or any other specific framework or library. There is no hidden
510
- magic or complex metaprogramming.
511
-
512
- Every part of llm.rb is designed to be explicit, composable, memory-safe,
513
- and production-ready without compromise. No unnecessary abstractions,
514
- no global configuration, no global state, and no dependencies that aren't
515
- part of standard Ruby. It has been inspired in part by other languages such
516
- as Python, but it is not a port of any other library.
550
+ llm.rb provides a clean, dependency-free interface to Large Language Models,
551
+ treating Ruby itself — not Rails or any specific framework — as the primary platform.
552
+ It avoids hidden magic, complex metaprogramming, and heavy DSLs. It is intentionally
553
+ simple and won't compromise that simplicity, even if that means saying no to
554
+ certain features.
555
+
556
+ Instead, it embraces a general-purpose, object-oriented design that prioritizes
557
+ explicitness, composability, and clarity. Code should be easy to follow, test, and adapt.
558
+ For that reason we favor small, cooperating objects over deeply nested blocks — a pattern
559
+ that often emerges in DSL-heavy libraries.
560
+
561
+ Each part of llm.rb is designed to be conscious of memory, ready for production, and free
562
+ from global state or non-standard dependencies. While inspired by ideas from other ecosystems
563
+ (especially Python) it is not a port of any other library — it is a Ruby library written
564
+ by Rubyists who value borrowing good ideas from other languages and ecosystems.
517
565
 
518
566
  ## License
519
567
 
@@ -1,6 +1,11 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Array JSON::Schema::Array} class represents an
6
+ # array value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Array < Leaf
5
10
  def initialize(*items)
6
11
  @items = items
@@ -1,6 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Boolean JSON::Schema::Boolean} class represents a
6
+ # boolean value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf}.
4
8
  class Booelean < Leaf
5
9
  def to_h
6
10
  super.merge!({type: "boolean"})
@@ -1,20 +1,42 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Integer JSON::Schema::Integer} class represents a
6
+ # whole number value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Integer < Leaf
10
+ ##
11
+ # Constrain the number to a minimum value
12
+ # @param [Integer] i The minimum value
13
+ # @return [JSON::Schema::Number] Returns self
5
14
  def min(i)
6
15
  tap { @minimum = i }
7
16
  end
8
17
 
18
+ ##
19
+ # Constrain the number to a maximum value
20
+ # @param [Integer] i The maximum value
21
+ # @return [JSON::Schema::Number] Returns self
9
22
  def max(i)
10
23
  tap { @maximum = i }
11
24
  end
12
25
 
26
+ ##
27
+ # Constrain the number to a multiple of a given value
28
+ # @param [Integer] i The multiple
29
+ # @return [JSON::Schema::Number] Returns self
30
+ def multiple_of(i)
31
+ tap { @multiple_of = i }
32
+ end
33
+
13
34
  def to_h
14
35
  super.merge!({
15
36
  type: "integer",
16
37
  minimum: @minimum,
17
- maximum: @maximum
38
+ maximum: @maximum,
39
+ multipleOf: @multiple_of
18
40
  }).compact
19
41
  end
20
42
  end
@@ -13,6 +13,7 @@ class JSON::Schema
13
13
  @default = nil
14
14
  @enum = nil
15
15
  @required = nil
16
+ @const = nil
16
17
  end
17
18
 
18
19
  ##
@@ -33,12 +34,22 @@ class JSON::Schema
33
34
 
34
35
  ##
35
36
  # Set the allowed values of a leaf
37
+ # @see https://tour.json-schema.org/content/02-Primitive-Types/07-Enumerated-Values-II Enumerated Values
36
38
  # @param [Array] values The allowed values
37
39
  # @return [JSON::Schema::Leaf]
38
40
  def enum(*values)
39
41
  tap { @enum = values }
40
42
  end
41
43
 
44
+ ##
45
+ # Set the value of a leaf to be a constant value
46
+ # @see https://tour.json-schema.org/content/02-Primitive-Types/08-Defining-Constant-Values Constant Values
47
+ # @param [Object] value The constant value
48
+ # @return [JSON::Schema::Leaf]
49
+ def const(value)
50
+ tap { @const = value }
51
+ end
52
+
42
53
  ##
43
54
  # Denote a leaf as required
44
55
  # @return [JSON::Schema::Leaf]
@@ -1,6 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Null JSON::Schema::Null} class represents a
6
+ # null value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf}.
4
8
  class Null < Leaf
5
9
  def to_h
6
10
  super.merge!({type: "null"})
@@ -1,20 +1,42 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Number JSON::Schema::Number} class represents a
6
+ # number (either whole or decimal) value in a JSON schema. It is a
7
+ # subclass of {JSON::Schema::Leaf JSON::Schema::Leaf} and provides
8
+ # methods that can act as constraints.
4
9
  class Number < Leaf
10
+ ##
11
+ # Constrain the number to a minimum value
12
+ # @param [Integer, Float] i The minimum value
13
+ # @return [JSON::Schema::Number] Returns self
5
14
  def min(i)
6
15
  tap { @minimum = i }
7
16
  end
8
17
 
18
+ ##
19
+ # Constrain the number to a maximum value
20
+ # @param [Integer, Float] i The maximum value
21
+ # @return [JSON::Schema::Number] Returns self
9
22
  def max(i)
10
23
  tap { @maximum = i }
11
24
  end
12
25
 
26
+ ##
27
+ # Constrain the number to a multiple of a given value
28
+ # @param [Integer, Float] i The multiple
29
+ # @return [JSON::Schema::Number] Returns self
30
+ def multiple_of(i)
31
+ tap { @multiple_of = i }
32
+ end
33
+
13
34
  def to_h
14
35
  super.merge!({
15
36
  type: "number",
16
37
  minimum: @minimum,
17
- maximum: @maximum
38
+ maximum: @maximum,
39
+ multipleOf: @multiple_of
18
40
  }).compact
19
41
  end
20
42
  end
@@ -1,10 +1,14 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Object JSON::Schema::Object} class represents an
6
+ # object value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Object < Leaf
5
- def initialize(properties, **rest)
10
+ def initialize(properties)
6
11
  @properties = properties
7
- super(**rest)
8
12
  end
9
13
 
10
14
  def to_h
@@ -1,9 +1,34 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::String JSON::Schema::String} class represents a
6
+ # string value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class String < Leaf
10
+ ##
11
+ # Constrain the string to a minimum length
12
+ # @param [Integer] i The minimum length
13
+ # @return [JSON::Schema::String] Returns self
14
+ def min(i)
15
+ tap { @minimum = i }
16
+ end
17
+
18
+ ##
19
+ # Constrain the string to a maximum length
20
+ # @param [Integer] i The maximum length
21
+ # @return [JSON::Schema::String] Returns self
22
+ def max(i)
23
+ tap { @maximum = i }
24
+ end
25
+
5
26
  def to_h
6
- super.merge!({type: "string"})
27
+ super.merge!({
28
+ type: "string",
29
+ minLength: @minimum,
30
+ maxLength: @maximum
31
+ }).compact
7
32
  end
8
33
  end
9
34
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module JSON
2
4
  end unless defined?(JSON)
3
5
 
data/lib/json/schema.rb CHANGED
@@ -50,35 +50,35 @@ class JSON::Schema
50
50
  ##
51
51
  # Returns a string
52
52
  # @return [JSON::Schema::String]
53
- def string(...)
54
- String.new(...)
53
+ def string
54
+ String.new
55
55
  end
56
56
 
57
57
  ##
58
58
  # Returns a number
59
59
  # @return [JSON::Schema::Number] a number
60
- def number(...)
61
- Number.new(...)
60
+ def number
61
+ Number.new
62
62
  end
63
63
 
64
64
  ##
65
65
  # Returns an integer
66
66
  # @return [JSON::Schema::Integer]
67
- def integer(...)
68
- Integer.new(...)
67
+ def integer
68
+ Integer.new
69
69
  end
70
70
 
71
71
  ##
72
72
  # Returns a boolean
73
73
  # @return [JSON::Schema::Boolean]
74
- def boolean(...)
75
- Boolean.new(...)
74
+ def boolean
75
+ Boolean.new
76
76
  end
77
77
 
78
78
  ##
79
79
  # Returns null
80
80
  # @return [JSON::Schema::Null]
81
- def null(...)
82
- Null.new(...)
81
+ def null
82
+ Null.new
83
83
  end
84
84
  end