llm.rb 0.4.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +134 -88
  3. data/lib/json/schema/array.rb +6 -2
  4. data/lib/json/schema/boolean.rb +4 -0
  5. data/lib/json/schema/integer.rb +23 -1
  6. data/lib/json/schema/leaf.rb +38 -0
  7. data/lib/json/schema/null.rb +4 -0
  8. data/lib/json/schema/number.rb +23 -1
  9. data/lib/json/schema/object.rb +6 -2
  10. data/lib/json/schema/string.rb +26 -1
  11. data/lib/json/schema/version.rb +8 -0
  12. data/lib/json/schema.rb +34 -23
  13. data/lib/llm/buffer.rb +28 -10
  14. data/lib/llm/chat.rb +26 -1
  15. data/lib/llm/core_ext/ostruct.rb +14 -8
  16. data/lib/llm/file.rb +6 -1
  17. data/lib/llm/function.rb +81 -0
  18. data/lib/llm/message.rb +46 -1
  19. data/lib/llm/providers/anthropic/format/completion_format.rb +73 -0
  20. data/lib/llm/providers/anthropic/format.rb +7 -33
  21. data/lib/llm/providers/anthropic/response_parser/completion_parser.rb +51 -0
  22. data/lib/llm/providers/anthropic/response_parser.rb +1 -9
  23. data/lib/llm/providers/anthropic.rb +4 -3
  24. data/lib/llm/providers/gemini/audio.rb +4 -4
  25. data/lib/llm/providers/gemini/files.rb +5 -4
  26. data/lib/llm/providers/gemini/format/completion_format.rb +54 -0
  27. data/lib/llm/providers/gemini/format.rb +28 -27
  28. data/lib/llm/providers/gemini/images.rb +9 -4
  29. data/lib/llm/providers/gemini/response_parser/completion_parser.rb +46 -0
  30. data/lib/llm/providers/gemini/response_parser.rb +13 -20
  31. data/lib/llm/providers/gemini.rb +3 -12
  32. data/lib/llm/providers/ollama/format/completion_format.rb +72 -0
  33. data/lib/llm/providers/ollama/format.rb +10 -30
  34. data/lib/llm/providers/ollama/response_parser/completion_parser.rb +42 -0
  35. data/lib/llm/providers/ollama/response_parser.rb +8 -11
  36. data/lib/llm/providers/ollama.rb +3 -11
  37. data/lib/llm/providers/openai/audio.rb +6 -6
  38. data/lib/llm/providers/openai/files.rb +3 -3
  39. data/lib/llm/providers/openai/format/completion_format.rb +81 -0
  40. data/lib/llm/providers/openai/format/respond_format.rb +69 -0
  41. data/lib/llm/providers/openai/format.rb +25 -58
  42. data/lib/llm/providers/openai/images.rb +4 -2
  43. data/lib/llm/providers/openai/response_parser/completion_parser.rb +55 -0
  44. data/lib/llm/providers/openai/response_parser/respond_parser.rb +56 -0
  45. data/lib/llm/providers/openai/response_parser.rb +8 -44
  46. data/lib/llm/providers/openai/responses.rb +10 -11
  47. data/lib/llm/providers/openai.rb +5 -16
  48. data/lib/llm/response/{output.rb → respond.rb} +2 -2
  49. data/lib/llm/response.rb +1 -1
  50. data/lib/llm/version.rb +1 -1
  51. data/lib/llm.rb +28 -0
  52. data/llm.gemspec +1 -0
  53. metadata +29 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 46af7f9487355b1ad33501bdc2602f2bfbb9ac3a7a3377eb601d367167373b69
4
- data.tar.gz: f1bb4dc1b423e5335d19540c664af714d523ecded97476fb379711f2015fa93a
3
+ metadata.gz: 50bc0de3950bfbc8dd1a9ff3b211915fff5ae510f6fd0f0fc277e5cbf689e5ea
4
+ data.tar.gz: 3e48e23abefeb652429cb145be5fa1166e78cb24f3ddfb7440daff2e4a461c5d
5
5
  SHA512:
6
- metadata.gz: b249df823bedda3041df65b4f17b861d7529737855c2395dbcb978838098eecb0950ec935e06f290d9737f7b780c64a42cd21bf54e8077360947c1e70c3bad04
7
- data.tar.gz: 0a35370e5cd41488dfcbc05259c28ac906e1a3b217f4882aebfb87a8ee5b9d04ca771df9ed77d32efc0782510bc35c33db7a0704a81841ae1b8f9d832d5f3af7
6
+ metadata.gz: 8b6ad1955541753ba991a0f9b55e261cb4cc32a3a85470cee7312359cafce7d4689757a2f5efe2f641c9813dca5b7722b47ff8dc12d90f8a600eff1e3c7b5b4d
7
+ data.tar.gz: b807543c44e12d4a251370a84cda59714a9b62cf3bf031203d723a8740167aab5a4f9348ae29856b6324384f2042d1851c05c272ba3a6510b33d99d6fddf4fdc
data/README.md CHANGED
@@ -1,11 +1,31 @@
1
1
  ## About
2
2
 
3
- llm.rb is a lightweight library that provides a common interface
4
- and set of functionality for multiple Large Language Models (LLMs). It
5
- is designed to be simple, flexible, and easy to use – and it has been
6
- implemented with zero dependencies outside Ruby's standard library. See the
7
- [philosophy](#philosophy) section for more information on the design principles
8
- behind llm.rb.
3
+ llm.rb is a zero-dependency Ruby toolkit for Large Language Models like
4
+ OpenAI, Gemini, Anthropic, and more. It’s fast, clean, and composable
5
+ with full support for chat, tool calling, audio, images, files, and
6
+ JSON Schema generation.
7
+
8
+ ## Features
9
+
10
+ #### General
11
+ - ✅ Unified interface for OpenAI, Gemini, Anthropic, Ollama, and more
12
+ - 📦 Zero dependencies outside Ruby's standard library
13
+ - 🔌 Model introspection and selection
14
+ - 🚀 Optimized for performance and low memory usage
15
+
16
+ #### Chat, Agents
17
+ - 🧠 Stateless and stateful chat via completions and responses API
18
+ - 🤖 Tool calling and function execution
19
+ - 🗂️ JSON Schema support for structured, validated responses
20
+
21
+ #### Media
22
+ - 🗣️ Text-to-speech, transcription, and translation
23
+ - 🖼️ Image generation, editing, and variation support
24
+ - 📎 File uploads and prompt-aware file interaction
25
+ - 💡 Multimodal prompts (text, URLs, files)
26
+
27
+ #### Embeddings
28
+ - 🧮 Text embeddings and vector support
9
29
 
10
30
  ## Examples
11
31
 
@@ -133,7 +153,7 @@ The interface is designed so you could drop in any other library in its place:
133
153
  require "llm"
134
154
 
135
155
  llm = LLM.openai(ENV["KEY"])
136
- schema = llm.schema.object({os: llm.schema.string.enum("OpenBSD", "FreeBSD", "NetBSD").required})
156
+ schema = llm.schema.object({os: llm.schema.string.enum("OpenBSD", "FreeBSD", "NetBSD")})
137
157
  bot = LLM::Chat.new(llm, schema:)
138
158
  bot.chat "You secretly love NetBSD", :system
139
159
  bot.chat "What operating system is the best?", :user
@@ -150,6 +170,56 @@ bot.chat "Does the earth orbit the sun?", :user
150
170
  bot.messages.find(&:assistant?).content! # => {probability: 1}
151
171
  ```
152
172
 
173
+ ### Tools
174
+
175
+ #### Functions
176
+
177
+ The OpenAI, Anthropic, Gemini and Ollama providers support a powerful feature known as
178
+ tool calling, and although it is a little complex to understand at first,
179
+ it can be powerful for building agents. The following example demonstrates how we
180
+ can define a local function (which happens to be a tool), and OpenAI can
181
+ then detect when we should call the function.
182
+
183
+ The
184
+ [LLM::Chat#functions](https://0x1eef.github.io/x/llm.rb/LLM/Chat.html#functions-instance_method)
185
+ method returns an array of functions that can be called after sending a message and
186
+ it will only be populated if the LLM detects a function should be called. Each function
187
+ corresponds to an element in the "tools" array. The array is emptied after a function call,
188
+ and potentially repopulated on the next message.
189
+
190
+ The following example defines an agent that can run system commands based on natural language,
191
+ and it is only intended to be a fun demo of tool calling - it is not recommended to run
192
+ arbitrary commands from a LLM without sanitizing the input first :) Without further ado:
193
+
194
+ ```ruby
195
+ #!/usr/bin/env ruby
196
+ require "llm"
197
+
198
+ llm = LLM.openai(ENV["KEY"])
199
+ tool = LLM.function(:system) do |fn|
200
+ fn.description "Run a shell command"
201
+ fn.params do |schema|
202
+ schema.object(command: schema.string.required)
203
+ end
204
+ fn.define do |params|
205
+ system(params.command)
206
+ end
207
+ end
208
+
209
+ bot = LLM::Chat.new(llm, tools: [tool]).lazy
210
+ bot.chat "Your task is to run shell commands via a tool.", :system
211
+
212
+ bot.chat "What is the current date?", :user
213
+ bot.chat bot.functions.map(&:call) # report return value to the LLM
214
+
215
+ bot.chat "What operating system am I running? (short version please!)", :user
216
+ bot.chat bot.functions.map(&:call) # report return value to the LLM
217
+
218
+ ##
219
+ # Thu May 1 10:01:02 UTC 2025
220
+ # FreeBSD
221
+ ```
222
+
153
223
  ### Audio
154
224
 
155
225
  #### Speech
@@ -159,8 +229,7 @@ can create speech from text, transcribe audio to text, or translate
159
229
  audio to text (usually English). The following example uses the OpenAI provider
160
230
  to create an audio file from a text prompt. The audio is then moved to
161
231
  `${HOME}/hello.mp3` as the final step. As always, consult the provider's
162
- documentation (eg [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/create))
163
- for more information on how to use the audio generation API:
232
+ documentation for more information on how to use the audio generation API:
164
233
 
165
234
  ```ruby
166
235
  #!/usr/bin/env ruby
@@ -177,16 +246,7 @@ The following example transcribes an audio file to text. The audio file
177
246
  (`${HOME}/hello.mp3`) was theoretically created in the previous example,
178
247
  and the result is printed to the console. The example uses the OpenAI
179
248
  provider to transcribe the audio file. As always, consult the provider's
180
- documentation (eg
181
- [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/createTranscription),
182
- [Gemini docs](https://ai.google.dev/gemini-api/docs/audio))
183
- for more information on how to use the audio transcription API.
184
-
185
- Please also see provider-specific documentation for more provider-specific
186
- examples and documentation
187
- (eg
188
- [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html),
189
- [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)):
249
+ documentation for more information on how to use the audio transcription API:
190
250
 
191
251
  ```ruby
192
252
  #!/usr/bin/env ruby
@@ -194,7 +254,7 @@ require "llm"
194
254
 
195
255
  llm = LLM.openai(ENV["KEY"])
196
256
  res = llm.audio.create_transcription(
197
- file: LLM::File(File.join(Dir.home, "hello.mp3"))
257
+ file: File.join(Dir.home, "hello.mp3")
198
258
  )
199
259
  print res.text, "\n" # => "Hello world."
200
260
  ```
@@ -205,17 +265,8 @@ The following example translates an audio file to text. In this example
205
265
  the audio file (`${HOME}/bomdia.mp3`) is theoretically in Portuguese,
206
266
  and it is translated to English. The example uses the OpenAI provider,
207
267
  and at the time of writing, it can only translate to English. As always,
208
- consult the provider's documentation (eg
209
- [OpenAI docs](https://platform.openai.com/docs/api-reference/audio/createTranslation),
210
- [Gemini docs](https://ai.google.dev/gemini-api/docs/audio))
211
- for more information on how to use the audio translation API.
212
-
213
- Please also see provider-specific documentation for more provider-specific
214
- examples and documentation
215
- (eg
216
- [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html),
217
- [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)):
218
-
268
+ consult the provider's documentation for more information on how to use
269
+ the audio translation API:
219
270
 
220
271
  ```ruby
221
272
  #!/usr/bin/env ruby
@@ -223,7 +274,7 @@ require "llm"
223
274
 
224
275
  llm = LLM.openai(ENV["KEY"])
225
276
  res = llm.audio.create_translation(
226
- file: LLM::File(File.join(Dir.home, "bomdia.mp3"))
277
+ file: File.join(Dir.home, "bomdia.mp3")
227
278
  )
228
279
  print res.text, "\n" # => "Good morning."
229
280
  ```
@@ -236,13 +287,7 @@ Some but not all LLM providers implement image generation capabilities that
236
287
  can create new images from a prompt, or edit an existing image with a
237
288
  prompt. The following example uses the OpenAI provider to create an
238
289
  image of a dog on a rocket to the moon. The image is then moved to
239
- `${HOME}/dogonrocket.png` as the final step.
240
-
241
- Please also see provider-specific documentation for more provider-specific
242
- examples and documentation
243
- (eg
244
- [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html),
245
- [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)):
290
+ `${HOME}/dogonrocket.png` as the final step:
246
291
 
247
292
  ```ruby
248
293
  #!/usr/bin/env ruby
@@ -266,17 +311,8 @@ now wearing a hat. The image is then moved to `${HOME}/catwithhat.png` as
266
311
  the final step.
267
312
 
268
313
  Results and quality may vary, consider prompt adjustments if the results
269
- are not satisfactory, and consult the provider's documentation
270
- (eg
271
- [OpenAI docs](https://platform.openai.com/docs/api-reference/images/createEdit),
272
- [Gemini docs](https://ai.google.dev/gemini-api/docs/image-generation))
273
- for more information on how to use the image editing API.
274
-
275
- Please also see provider-specific documentation for more provider-specific
276
- examples and documentation
277
- (eg
278
- [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html),
279
- [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)):
314
+ are not as expected, and consult the provider's documentation
315
+ for more information on how to use the image editing API:
280
316
 
281
317
  ```ruby
282
318
  #!/usr/bin/env ruby
@@ -286,7 +322,7 @@ require "fileutils"
286
322
 
287
323
  llm = LLM.openai(ENV["KEY"])
288
324
  res = llm.images.edit(
289
- image: LLM::File("/images/cat.png"),
325
+ image: "/images/cat.png",
290
326
  prompt: "a cat with a hat",
291
327
  )
292
328
  res.urls.each do |url|
@@ -300,9 +336,8 @@ end
300
336
  The following example is focused on creating variations of a local image.
301
337
  The image (`/images/cat.png`) is returned to us with five different variations.
302
338
  The images are then moved to `${HOME}/catvariation0.png`, `${HOME}/catvariation1.png`
303
- and so on as the final step. Consult the provider's documentation
304
- (eg [OpenAI docs](https://platform.openai.com/docs/api-reference/images/createVariation))
305
- for more information on how to use the image variations API:
339
+ and so on as the final step. Consult the provider's documentation for more information
340
+ on how to use the image variations API:
306
341
 
307
342
  ```ruby
308
343
  #!/usr/bin/env ruby
@@ -312,7 +347,7 @@ require "fileutils"
312
347
 
313
348
  llm = LLM.openai(ENV["KEY"])
314
349
  res = llm.images.create_variation(
315
- image: LLM::File("/images/cat.png"),
350
+ image: "/images/cat.png",
316
351
  n: 5
317
352
  )
318
353
  res.urls.each.with_index do |url, index|
@@ -331,13 +366,8 @@ for this feature. The following example uses the OpenAI provider to describe
331
366
  the contents of a PDF file after it has been uploaded. The file (an instance
332
367
  of [LLM::Response::File](https://0x1eef.github.io/x/llm.rb/LLM/Response/File.html))
333
368
  is passed directly to the chat method, and generally any object a prompt supports
334
- can be given to the chat method.
369
+ can be given to the chat method:
335
370
 
336
- Please also see provider-specific documentation for more provider-specific
337
- examples and documentation
338
- (eg
339
- [LLM::Gemini::Files](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Files.html),
340
- [LLM::OpenAI::Files](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Files.html)):
341
371
 
342
372
  ```ruby
343
373
  #!/usr/bin/env ruby
@@ -345,7 +375,7 @@ require "llm"
345
375
 
346
376
  llm = LLM.openai(ENV["KEY"])
347
377
  bot = LLM::Chat.new(llm).lazy
348
- file = llm.files.create(file: LLM::File("/documents/openbsd_is_awesome.pdf"))
378
+ file = llm.files.create(file: "/documents/openbsd_is_awesome.pdf")
349
379
  bot.chat(file)
350
380
  bot.chat("What is this file about?")
351
381
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
@@ -364,17 +394,11 @@ bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n"
364
394
  Generally all providers accept text prompts but some providers can
365
395
  also understand URLs, and various file types (eg images, audio, video,
366
396
  etc). The llm.rb approach to multimodal prompts is to let you pass `URI`
367
- objects to describe links, `LLM::File` / `LLM::Response::File` objects
397
+ objects to describe links, `LLM::File` | `LLM::Response::File` objects
368
398
  to describe files, `String` objects to describe text blobs, or an array
369
399
  of the aforementioned objects to describe multiple objects in a single
370
400
  prompt. Each object is a first class citizen that can be passed directly
371
- to a prompt.
372
-
373
- For more depth and examples on how to use the multimodal API, please see
374
- the [provider-specific documentation](https://0x1eef.github.io/x/llm.rb/)
375
- for more provider-specific examples – there can be subtle differences
376
- between providers and even between APIs from the same provider that are
377
- not covered in the README:
401
+ to a prompt:
378
402
 
379
403
  ```ruby
380
404
  #!/usr/bin/env ruby
@@ -383,19 +407,17 @@ require "llm"
383
407
  llm = LLM.openai(ENV["KEY"])
384
408
  bot = LLM::Chat.new(llm).lazy
385
409
 
386
- bot.chat URI("https://example.com/path/to/image.png")
387
- bot.chat "Describe the above image"
410
+ bot.chat [URI("https://example.com/path/to/image.png"), "Describe the image in the link"]
388
411
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
389
412
 
390
- file = bot.files.create(file: LLM::File("/documents/openbsd_is_awesome.pdf"))
391
- bot.chat file
392
- bot.chat "What is this file about?"
413
+ file = llm.files.create(file: "/documents/openbsd_is_awesome.pdf")
414
+ bot.chat [file, "What is this file about?"]
393
415
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
394
416
 
395
- bot.chat [LLM::File("/images/puffy.png"), "What is this image about?"]
417
+ bot.chat [LLM.File("/images/puffy.png"), "What is this image about?"]
396
418
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
397
419
 
398
- bot.chat [LLM::File("/images/beastie.png"), "What is this image about?"]
420
+ bot.chat [LLM.File("/images/beastie.png"), "What is this image about?"]
399
421
  bot.messages.select(&:assistant?).each { print "[#{_1.role}] ", _1.content, "\n" }
400
422
  ```
401
423
 
@@ -497,6 +519,26 @@ over or doesn't cover at all. The API reference is available at
497
519
  [0x1eef.github.io/x/llm.rb](https://0x1eef.github.io/x/llm.rb).
498
520
 
499
521
 
522
+ ### See also
523
+
524
+ #### Gemini
525
+
526
+ * [LLM::Gemini](https://0x1eef.github.io/x/llm.rb/LLM/Gemini.html)
527
+ * [LLM::Gemini::Images](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Images.html)
528
+ * [LLM::Gemini::Audio](https://0x1eef.github.io/x/llm.rb/LLM/Gemini/Audio.html)
529
+
530
+ #### OpenAI
531
+
532
+ * [LLM::OpenAI](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI.html)
533
+ * [LLM::OpenAI::Images](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Images.html)
534
+ * [LLM::OpenAI::Audio](https://0x1eef.github.io/x/llm.rb/LLM/OpenAI/Audio.html)
535
+
536
+ #### Anthropic
537
+ * [LLM::Anthropic](https://0x1eef.github.io/x/llm.rb/LLM/Anthropic.html)
538
+
539
+ #### Ollama
540
+ * [LLM::Ollama](https://0x1eef.github.io/x/llm.rb/LLM/Ollama.html)
541
+
500
542
  ## Install
501
543
 
502
544
  llm.rb can be installed via rubygems.org:
@@ -505,17 +547,21 @@ llm.rb can be installed via rubygems.org:
505
547
 
506
548
  ## Philosophy
507
549
 
508
- llm.rb was built for developers who believe that simplicity can be challenging
509
- but it is always worth it. It provides a clean, dependency-free interface to
510
- Large Language Models, treating Ruby itself as the primary platform –
511
- not Rails or any other specific framework or library. There is no hidden
512
- magic or complex metaprogramming.
513
-
514
- Every part of llm.rb is designed to be explicit, composable, memory-safe,
515
- and production-ready without compromise. No unnecessary abstractions,
516
- no global configuration, no global state, and no dependencies that aren't
517
- part of standard Ruby. It has been inspired in part by other languages such
518
- as Python, but it is not a port of any other library.
550
+ llm.rb provides a clean, dependency-free interface to Large Language Models,
551
+ treating Ruby itself — not Rails or any specific framework — as the primary platform.
552
+ It avoids hidden magic, complex metaprogramming, and heavy DSLs. It is intentionally
553
+ simple and won't compromise on being a simple library, even if that means saying no to
554
+ certain features.
555
+
556
+ Instead, it embraces a general-purpose, object-oriented design that prioritizes
557
+ explicitness, composability, and clarity. Code should be easy to follow, test, and adapt.
558
+ For that reason we favor small, cooperating objects over deeply nested blocks — a pattern
559
+ that often emerges in DSL-heavy libraries.
560
+
561
+ Each part of llm.rb is designed to be conscious of memory, ready for production, and free
562
+ from global state or non-standard dependencies. While inspired by ideas from other ecosystems
563
+ (especially Python), it is not a port of any other library — it is a Ruby library written
564
+ by Rubyists who value borrowing good ideas from other languages and ecosystems.
519
565
 
520
566
  ## License
521
567
 
@@ -1,10 +1,14 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Array JSON::Schema::Array} class represents an
6
+ # array value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Array < Leaf
5
- def initialize(items, **rest)
10
+ def initialize(*items)
6
11
  @items = items
7
- super(**rest)
8
12
  end
9
13
 
10
14
  def to_h
@@ -1,6 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Boolean JSON::Schema::Boolean} class represents a
6
+ # boolean value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf}.
4
8
  class Booelean < Leaf
5
9
  def to_h
6
10
  super.merge!({type: "boolean"})
@@ -1,20 +1,42 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Integer JSON::Schema::Integer} class represents a
6
+ # whole number value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Integer < Leaf
10
+ ##
11
+ # Constrain the number to a minimum value
12
+ # @param [Integer] i The minimum value
13
+ # @return [JSON::Schema::Number] Returns self
5
14
  def min(i)
6
15
  tap { @minimum = i }
7
16
  end
8
17
 
18
+ ##
19
+ # Constrain the number to a maximum value
20
+ # @param [Integer] i The maximum value
21
+ # @return [JSON::Schema::Number] Returns self
9
22
  def max(i)
10
23
  tap { @maximum = i }
11
24
  end
12
25
 
26
+ ##
27
+ # Constrain the number to a multiple of a given value
28
+ # @param [Integer] i The multiple
29
+ # @return [JSON::Schema::Number] Returns self
30
+ def multiple_of(i)
31
+ tap { @multiple_of = i }
32
+ end
33
+
13
34
  def to_h
14
35
  super.merge!({
15
36
  type: "integer",
16
37
  minimum: @minimum,
17
- maximum: @maximum
38
+ maximum: @maximum,
39
+ multipleOf: @multiple_of
18
40
  }).compact
19
41
  end
20
42
  end
@@ -1,38 +1,76 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Leaf JSON::Schema::Leaf} class is the
6
+ # superclass of all values that can appear in a JSON schema.
7
+ # See the instance methods of {JSON::Schema JSON::Schema} for
8
+ # an example of how to create instances of {JSON::Schema::Leaf JSON::Schema::Leaf}
9
+ # through its subclasses.
4
10
  class Leaf
5
11
  def initialize
6
12
  @description = nil
7
13
  @default = nil
8
14
  @enum = nil
9
15
  @required = nil
16
+ @const = nil
10
17
  end
11
18
 
19
+ ##
20
+ # Set the description of a leaf
21
+ # @param [String] str The description
22
+ # @return [JSON::Schema::Leaf]
12
23
  def description(str)
13
24
  tap { @description = str }
14
25
  end
15
26
 
27
+ ##
28
+ # Set the default value of a leaf
29
+ # @param [Object] value The default value
30
+ # @return [JSON::Schema::Leaf]
16
31
  def default(value)
17
32
  tap { @default = value }
18
33
  end
19
34
 
35
+ ##
36
+ # Set the allowed values of a leaf
37
+ # @see https://tour.json-schema.org/content/02-Primitive-Types/07-Enumerated-Values-II Enumerated Values
38
+ # @param [Array] values The allowed values
39
+ # @return [JSON::Schema::Leaf]
20
40
  def enum(*values)
21
41
  tap { @enum = values }
22
42
  end
23
43
 
44
+ ##
45
+ # Set the value of a leaf to be a constant value
46
+ # @see https://tour.json-schema.org/content/02-Primitive-Types/08-Defining-Constant-Values Constant Values
47
+ # @param [Object] value The constant value
48
+ # @return [JSON::Schema::Leaf]
49
+ def const(value)
50
+ tap { @const = value }
51
+ end
52
+
53
+ ##
54
+ # Denote a leaf as required
55
+ # @return [JSON::Schema::Leaf]
24
56
  def required
25
57
  tap { @required = true }
26
58
  end
27
59
 
60
+ ##
61
+ # @return [Hash]
28
62
  def to_h
29
63
  {description: @description, default: @default, enum: @enum}.compact
30
64
  end
31
65
 
66
+ ##
67
+ # @return [String]
32
68
  def to_json(options = {})
33
69
  to_h.to_json(options)
34
70
  end
35
71
 
72
+ ##
73
+ # @return [Boolean]
36
74
  def required?
37
75
  @required
38
76
  end
@@ -1,6 +1,10 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Null JSON::Schema::Null} class represents a
6
+ # null value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf}.
4
8
  class Null < Leaf
5
9
  def to_h
6
10
  super.merge!({type: "null"})
@@ -1,20 +1,42 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Number JSON::Schema::Number} class represents a
6
+ # number (either whole or decimal) value in a JSON schema. It is a
7
+ # subclass of {JSON::Schema::Leaf JSON::Schema::Leaf} and provides
8
+ # methods that can act as constraints.
4
9
  class Number < Leaf
10
+ ##
11
+ # Constrain the number to a minimum value
12
+ # @param [Integer, Float] i The minimum value
13
+ # @return [JSON::Schema::Number] Returns self
5
14
  def min(i)
6
15
  tap { @minimum = i }
7
16
  end
8
17
 
18
+ ##
19
+ # Constrain the number to a maximum value
20
+ # @param [Integer, Float] i The maximum value
21
+ # @return [JSON::Schema::Number] Returns self
9
22
  def max(i)
10
23
  tap { @maximum = i }
11
24
  end
12
25
 
26
+ ##
27
+ # Constrain the number to a multiple of a given value
28
+ # @param [Integer, Float] i The multiple
29
+ # @return [JSON::Schema::Number] Returns self
30
+ def multiple_of(i)
31
+ tap { @multiple_of = i }
32
+ end
33
+
13
34
  def to_h
14
35
  super.merge!({
15
36
  type: "number",
16
37
  minimum: @minimum,
17
- maximum: @maximum
38
+ maximum: @maximum,
39
+ multipleOf: @multiple_of
18
40
  }).compact
19
41
  end
20
42
  end
@@ -1,10 +1,14 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::Object JSON::Schema::Object} class represents an
6
+ # object value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class Object < Leaf
5
- def initialize(properties, **rest)
10
+ def initialize(properties)
6
11
  @properties = properties
7
- super(**rest)
8
12
  end
9
13
 
10
14
  def to_h
@@ -1,9 +1,34 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  class JSON::Schema
4
+ ##
5
+ # The {JSON::Schema::String JSON::Schema::String} class represents a
6
+ # string value in a JSON schema. It is a subclass of
7
+ # {JSON::Schema::Leaf JSON::Schema::Leaf} and provides methods that
8
+ # can act as constraints.
4
9
  class String < Leaf
10
+ ##
11
+ # Constrain the string to a minimum length
12
+ # @param [Integer] i The minimum length
13
+ # @return [JSON::Schema::String] Returns self
14
+ def min(i)
15
+ tap { @minimum = i }
16
+ end
17
+
18
+ ##
19
+ # Constrain the string to a maximum length
20
+ # @param [Integer] i The maximum length
21
+ # @return [JSON::Schema::String] Returns self
22
+ def max(i)
23
+ tap { @maximum = i }
24
+ end
25
+
5
26
  def to_h
6
- super.merge!({type: "string"})
27
+ super.merge!({
28
+ type: "string",
29
+ minLength: @minimum,
30
+ maxLength: @maximum
31
+ }).compact
7
32
  end
8
33
  end
9
34
  end
@@ -0,0 +1,8 @@
1
+ # frozen_string_literal: true
2
+
3
+ module JSON
4
+ end unless defined?(JSON)
5
+
6
+ class JSON::Schema
7
+ VERSION = "0.1.0"
8
+ end