light-openai-lib 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.txt +21 -0
- data/README.md +81 -0
- data/examples/chat.rb +26 -0
- data/lib/base64.rb +16 -0
- data/lib/bigdecimal.rb +16 -0
- data/lib/light/openai/base64_fallback.rb +13 -0
- data/lib/light/openai/bigdecimal_fallback.rb +75 -0
- data/lib/light/openai/client.rb +135 -0
- data/lib/light/openai/version.rb +7 -0
- data/lib/light/openai.rb +13 -0
- data/lib/openai_client.rb +3 -0
- metadata +147 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: ef80a78645addf631aca9e546f1a94b286c20e455db25c5fd6e11cb02afa40e5
|
|
4
|
+
data.tar.gz: abcc70421db1ab139dda1af35927ed6f658adbb770f9074f87c3652c9001f871
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: b978a4668d93bb222d1facd514a40481e2044c13eba8224cc4cfd325cf733c83a6738921f8c5bf3fe0356efc0b0edc2f166c7971ef432c7218234d202a07c03b
|
|
7
|
+
data.tar.gz: a4b7eaad0b060ad4d63aafeecb33a7466576f979208620926bc57ff70bd4bb49d6a405c0e88b851b7cbc969224b3fc33fc10686509259d8920100f70f4bcc11f
|
data/LICENSE.txt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2024 Gaetan Juvin
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
data/README.md
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
# Light OpenAI Lib
|
|
2
|
+
|
|
3
|
+
A tiny, dependency-light wrapper for the OpenAI Chat Completions API. It extracts the reusable client that powers several of my Ruby projects and packages it as a gem so it can be reused without copy/paste.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
Add the gem to your Gemfile:
|
|
8
|
+
|
|
9
|
+
```ruby
|
|
10
|
+
gem "light-openai-lib"
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
and run `bundle install`.
|
|
14
|
+
|
|
15
|
+
If you are not using Bundler, install it directly:
|
|
16
|
+
|
|
17
|
+
```sh
|
|
18
|
+
gem install light-openai-lib
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## Usage
|
|
22
|
+
|
|
23
|
+
```ruby
|
|
24
|
+
require "light/openai"
|
|
25
|
+
|
|
26
|
+
client = Light::OpenAI::Client.new(
|
|
27
|
+
api_key: ENV.fetch("OPENAI_API_KEY"),
|
|
28
|
+
logger: Logger.new($stdout)
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
response = client.chat(
|
|
32
|
+
model: "gpt-4o-mini",
|
|
33
|
+
messages: [
|
|
34
|
+
{ role: "system", content: "You are a helpful assistant." },
|
|
35
|
+
{ role: "user", content: "Tell me a joke about Ruby." }
|
|
36
|
+
],
|
|
37
|
+
temperature: 0.4
|
|
38
|
+
)
|
|
39
|
+
|
|
40
|
+
puts response.dig("choices", 0, "message", "content")
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
Environment variables control sane defaults:
|
|
44
|
+
|
|
45
|
+
| Variable | Purpose | Default |
|
|
46
|
+
| --- | --- | --- |
|
|
47
|
+
| `OPENAI_CHAT_COMPLETIONS_URL` | API base URL | `https://api.openai.com/v1/chat/completions` |
|
|
48
|
+
| `OPENAI_HTTP_TIMEOUT` | Request timeout in seconds | `900` |
|
|
49
|
+
| `OPENAI_HTTP_RETRIES` | Number of retry attempts | `2` |
|
|
50
|
+
| `OPENAI_HTTP_RETRY_INTERVAL` | Initial retry delay (exponential backoff) | `1.5` |
|
|
51
|
+
|
|
52
|
+
You can also pass `response_format:` to `#chat` to opt into JSON mode. If the OpenAI API responds with a `response_format` error the client automatically falls back to a standard request and returns the first success payload.
|
|
53
|
+
|
|
54
|
+
## Examples
|
|
55
|
+
|
|
56
|
+
This repo carries runnable scripts under `examples/`. The simplest one invokes the client with your OpenAI key:
|
|
57
|
+
|
|
58
|
+
```sh
|
|
59
|
+
OPENAI_API_KEY=sk-... bundle exec ruby examples/chat.rb "Explain compounding interest like I'm 12"
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
## Compatibility
|
|
63
|
+
|
|
64
|
+
- Ruby 3.0+
|
|
65
|
+
- No runtime dependencies besides [`httparty`](https://github.com/jnunemaker/httparty)
|
|
66
|
+
|
|
67
|
+
## Development
|
|
68
|
+
|
|
69
|
+
After checking out the repo, run:
|
|
70
|
+
|
|
71
|
+
```sh
|
|
72
|
+
bundle install
|
|
73
|
+
bundle exec rake test
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
`bin/console` gives you a REPL with the gem loaded. Update the version in `lib/light/openai/version.rb` before publishing, then build and push:
|
|
77
|
+
|
|
78
|
+
```sh
|
|
79
|
+
bundle exec rake build
|
|
80
|
+
gem push pkg/light-openai-lib-<version>.gem
|
|
81
|
+
```
|
data/examples/chat.rb
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
#!/usr/bin/env ruby
# frozen_string_literal: true

# Minimal runnable example: send one prompt to the chat client and print the reply.
require "bundler/setup"
require "light/openai"
require "logger"

abort "Set OPENAI_API_KEY before running." unless ENV["OPENAI_API_KEY"]

# Prompt comes from the command line; fall back to a default when none is given.
user_prompt =
  if ARGV.empty?
    "Say hello like a pirate."
  else
    ARGV.join(" ")
  end
chat_model = ENV.fetch("OPENAI_MODEL", "gpt-4o-mini")

api_client = Light::OpenAI::Client.new(
  api_key: ENV.fetch("OPENAI_API_KEY"),
  logger: Logger.new($stdout)
)

conversation = [
  { role: "system", content: "You are a friendly assistant." },
  { role: "user", content: user_prompt }
]

reply = api_client.chat(model: chat_model, messages: conversation, temperature: 0.6)

# Pull the assistant text out of the first choice, if present.
answer = reply.dig("choices", 0, "message", "content")
puts(answer || "<no response>")
|
data/lib/base64.rb
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Shim that prefers the interpreter's bundled base64.rb and falls back to a
# vendored pure-Ruby implementation when the stdlib copy is absent
# (presumably targeting newer Rubies where base64 ships as a separate gem —
# NOTE(review): confirm the intended Ruby versions).
native_loaded = false

begin
  require "rbconfig"
  # Resolve the interpreter's own copy directly. File.join always returns a
  # String, so no nil-guard is needed before File.exist?.
  native_file = File.join(RbConfig::CONFIG["rubylibdir"], "base64.rb")
  if File.exist?(native_file)
    # `load` (not `require`): this file itself shadows "base64" on the gem's
    # load path, so `require "base64"` could resolve right back to this shim.
    load native_file
    native_loaded = true
  end
rescue LoadError
  native_loaded = false
end

require_relative "light/openai/base64_fallback" unless native_loaded
|
data/lib/bigdecimal.rb
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Shim that prefers the interpreter's bundled bigdecimal.rb and falls back to
# a vendored pure-Ruby approximation when the stdlib copy is absent
# (presumably targeting Rubies where bigdecimal ships as a separate gem —
# NOTE(review): confirm the intended Ruby versions).
native_loaded = false

begin
  require "rbconfig"
  # Resolve the interpreter's own copy directly. File.join always returns a
  # String, so no nil-guard is needed before File.exist?.
  native_file = File.join(RbConfig::CONFIG["rubylibdir"], "bigdecimal.rb")
  if File.exist?(native_file)
    # `load` (not `require`): this file itself shadows "bigdecimal" on the
    # gem's load path, so `require "bigdecimal"` could resolve back to this shim.
    load native_file
    native_loaded = true
  end
rescue LoadError
  native_loaded = false
end

require_relative "light/openai/bigdecimal_fallback" unless native_loaded
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Pure-Ruby stand-in for the stdlib BigDecimal, loaded only when the real
# implementation is unavailable. It stores the original input and routes
# arithmetic through Float, trading arbitrary precision for portability.
class BigDecimal
  # @param value [Object] anything convertible via Float() (String, Numeric, ...)
  # @param _precision [Integer] accepted for stdlib API compatibility; ignored
  def initialize(value, _precision = 0)
    @value = value
  end

  # Best-effort Float conversion: unconvertible input yields 0.0 instead of
  # raising. TypeError is rescued alongside ArgumentError so nil/objects
  # (which make Float() raise TypeError, not ArgumentError) also degrade to 0.0.
  def to_f
    Float(@value)
  rescue ArgumentError, TypeError
    0.0
  end

  # Formatting arguments accepted by the stdlib to_s are ignored here.
  def to_s(*)
    @value.to_s
  end

  def to_i
    to_f.to_i
  end

  def to_r
    @value.to_r
  rescue NoMethodError
    Rational(to_f)
  end

  # Enables mixed arithmetic such as `1 + BigDecimal("2")`.
  def coerce(other)
    [self.class.new(other), self]
  rescue ArgumentError, TypeError
    [other.to_f, to_f]
  end

  # Value equality through Float; objects without #to_f compare unequal.
  def ==(other)
    to_f == other.to_f
  rescue NoMethodError
    false
  end

  # eql?/hash are defined together with == so instances behave consistently
  # as Hash keys (the original defined == alone).
  def eql?(other)
    other.is_a?(self.class) && to_f == other.to_f
  end

  def hash
    to_f.hash
  end

  def +(other)
    self.class.new(to_f + other.to_f)
  end

  def -(other)
    self.class.new(to_f - other.to_f)
  end

  def *(other)
    self.class.new(to_f * other.to_f)
  end

  def /(other)
    self.class.new(to_f / other.to_f)
  end

  # Anything else (comparisons, rounding, predicates, ...) is delegated to
  # the Float value.
  def method_missing(method_name, *args, &block)
    if to_f.respond_to?(method_name)
      to_f.public_send(method_name, *args, &block)
    else
      super
    end
  end

  def respond_to_missing?(method_name, include_private = false)
    to_f.respond_to?(method_name, include_private) || super
  end
end

module Kernel
  module_function

  # Kernel-level constructor mirroring the stdlib's BigDecimal() conversion.
  def BigDecimal(value, precision = 0)
    ::BigDecimal.new(value, precision)
  end
end
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "base64"
require "bigdecimal"
require "json"
require "httparty"
require "net/http"

module Light
  module OpenAI
    # Minimal chat client that wraps the OpenAI chat-completions endpoint
    # with bounded retry/backoff handling and optional logging.
    class Client
      # Deployment-level defaults, overridable per instance via the constructor.
      DEFAULT_API_URL = ENV.fetch("OPENAI_CHAT_COMPLETIONS_URL", "https://api.openai.com/v1/chat/completions").freeze
      DEFAULT_TIMEOUT = Integer(ENV.fetch("OPENAI_HTTP_TIMEOUT", 900))
      DEFAULT_RETRIES = Integer(ENV.fetch("OPENAI_HTTP_RETRIES", 2))
      DEFAULT_RETRY_INTERVAL = Float(ENV.fetch("OPENAI_HTTP_RETRY_INTERVAL", 1.5))

      # Exception classes treated as transient network failures and retried.
      # ECONNRESET / ETIMEDOUT / EOFError cover dropped or half-closed
      # connections that the previous rescue list did not retry.
      TRANSIENT_ERRORS = [
        HTTParty::Error,
        SocketError,
        EOFError,
        Errno::ECONNREFUSED,
        Errno::ECONNRESET,
        Errno::ETIMEDOUT,
        Net::OpenTimeout,
        Net::ReadTimeout
      ].freeze

      # @param api_key [String] OpenAI API key (required, non-empty)
      # @param api_url [String] chat-completions endpoint URL
      # @param timeout [Integer] per-request timeout in seconds
      # @param retries [Integer] extra attempts after the first (clamped to >= 0)
      # @param retry_interval [Float] initial backoff delay in seconds (doubles each attempt)
      # @param logger [Logger, nil] optional logger for debug/warn output
      # @raise [ArgumentError] when api_key is nil or empty
      def initialize(api_key:, api_url: DEFAULT_API_URL, timeout: DEFAULT_TIMEOUT, retries: DEFAULT_RETRIES, retry_interval: DEFAULT_RETRY_INTERVAL, logger: nil)
        raise ArgumentError, "api_key is required for OpenAIClient" if api_key.nil? || api_key.empty?

        @api_key = api_key
        @api_url = api_url
        @timeout = timeout
        @retries = [retries.to_i, 0].max            # negative input means "no retries"
        @retry_interval = [retry_interval.to_f, 0].max
        @logger = logger
      end

      # Sends a chat-completions request and returns the parsed JSON payload.
      #
      # @param model [String] model identifier, e.g. "gpt-4o-mini"
      # @param messages [Array<Hash>] chat messages ({role:, content:})
      # @param temperature [Numeric] sampling temperature
      # @param response_format [Hash, nil] optional JSON-mode format; if the
      #   API rejects it, the request is retried once without it
      # @return [Hash] decoded response body
      # @raise [StandardError] on exhausted retries or a non-2xx response
      def chat(model:, messages:, temperature: 1, response_format: nil)
        payload = {
          model: model,
          temperature: temperature,
          messages: messages
        }
        payload[:response_format] = response_format if response_format

        request(payload: payload, allow_fallback: !response_format.nil?)
      end

      private

      # Core request loop: POSTs the payload, retrying transient network
      # failures, retryable HTTP statuses, and unparseable bodies up to
      # @retries additional times with exponential backoff.
      def request(payload:, allow_fallback:)
        attempts = 0
        max_attempts = @retries + 1

        loop do
          attempts += 1
          log_debug("POST #{@api_url} model=#{payload[:model]} (attempt #{attempts}/#{max_attempts})")

          response = nil
          begin
            response = HTTParty.post(
              @api_url,
              headers: {
                "Authorization" => "Bearer #{@api_key}",
                "Content-Type" => "application/json"
              },
              body: JSON.dump(payload),
              timeout: @timeout
            )
          rescue *TRANSIENT_ERRORS => e
            if attempts < max_attempts
              wait = retry_delay(attempts)
              log_warn("OpenAI API request failed (#{e.class} #{e.message}) on attempt #{attempts}/#{max_attempts}; retrying in #{format('%.2f', wait)}s")
              sleep(wait)
              next
            end
            raise(StandardError, "OpenAI API request failed after #{attempts} attempts: #{e.class} #{e.message}")
          end

          if retryable_status?(response.code) && attempts < max_attempts
            wait = retry_delay(attempts)
            log_warn("OpenAI API HTTP #{response.code} on attempt #{attempts}/#{max_attempts}; retrying in #{format('%.2f', wait)}s")
            sleep(wait)
            next
          end

          body = nil
          begin
            body = parse_body(response.body)
          rescue JSON::ParserError => e
            if attempts < max_attempts
              wait = retry_delay(attempts)
              log_warn("Failed to parse OpenAI response on attempt #{attempts}/#{max_attempts}: #{e.message}. Retrying in #{format('%.2f', wait)}s")
              sleep(wait)
              next
            end
            raise(StandardError, "failed to parse OpenAI API response: #{e.message}")
          end

          # A 400 complaining about response_format means the endpoint does
          # not support JSON mode; retry once without it (fallback is one-shot:
          # the recursive call passes allow_fallback: false).
          if response.code == 400 && allow_fallback
            message = body.dig("error", "message").to_s
            if message.match?(/response_format/i)
              log_warn("response_format unsupported, retrying without JSON schema: #{message}")
              fallback_payload = payload.dup
              fallback_payload.delete(:response_format)
              return request(payload: fallback_payload, allow_fallback: false)
            end
          end

          unless response.code.between?(200, 299)
            error_message = body.dig("error", "message") || response.body
            raise(StandardError, "OpenAI API error (#{response.code}): #{error_message}")
          end

          return body
        end
      end

      def log_debug(message)
        @logger&.debug(message)
      end

      def log_warn(message)
        @logger&.warn(message)
      end

      # 5xx, 429 (rate limited) and 408 (request timeout) are worth retrying.
      def retryable_status?(code)
        return false if code.nil?

        code >= 500 || code == 429 || code == 408
      end

      # Exponential backoff: interval * 2^(attempt - 1).
      def retry_delay(attempt)
        @retry_interval * (2**(attempt - 1))
      end

      # Decodes a JSON body; blank/empty bodies decode to an empty Hash.
      def parse_body(raw_body)
        return {} if raw_body.to_s.strip.empty?

        JSON.parse(raw_body)
      end
    end
  end
end
|
data/lib/light/openai.rb
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# frozen_string_literal: true

# Gem entry point: loads the version constant and the HTTP client, and
# defines the gem-wide error base class.
require_relative "openai/version"
require_relative "openai/client"

module Light
  module OpenAI
    # Base error class for the gem; StandardError subclass so callers'
    # generic rescues still catch it.
    class Error < StandardError; end
  end
end

# Backwards compatibility for applications that previously referenced OpenAIClient directly.
OpenAIClient = Light::OpenAI::Client unless defined?(OpenAIClient)
|
metadata
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: light-openai-lib
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.1.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Gaetan Juvin
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2025-11-15 00:00:00.000000000 Z
|
|
12
|
+
dependencies:
|
|
13
|
+
- !ruby/object:Gem::Dependency
|
|
14
|
+
name: httparty
|
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
|
16
|
+
requirements:
|
|
17
|
+
- - ">="
|
|
18
|
+
- !ruby/object:Gem::Version
|
|
19
|
+
version: '0.18'
|
|
20
|
+
- - "<"
|
|
21
|
+
- !ruby/object:Gem::Version
|
|
22
|
+
version: '0.23'
|
|
23
|
+
type: :runtime
|
|
24
|
+
prerelease: false
|
|
25
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
26
|
+
requirements:
|
|
27
|
+
- - ">="
|
|
28
|
+
- !ruby/object:Gem::Version
|
|
29
|
+
version: '0.18'
|
|
30
|
+
- - "<"
|
|
31
|
+
- !ruby/object:Gem::Version
|
|
32
|
+
version: '0.23'
|
|
33
|
+
- !ruby/object:Gem::Dependency
|
|
34
|
+
name: base64
|
|
35
|
+
requirement: !ruby/object:Gem::Requirement
|
|
36
|
+
requirements:
|
|
37
|
+
- - ">="
|
|
38
|
+
- !ruby/object:Gem::Version
|
|
39
|
+
version: '0.2'
|
|
40
|
+
- - "<"
|
|
41
|
+
- !ruby/object:Gem::Version
|
|
42
|
+
version: '1.0'
|
|
43
|
+
type: :runtime
|
|
44
|
+
prerelease: false
|
|
45
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
46
|
+
requirements:
|
|
47
|
+
- - ">="
|
|
48
|
+
- !ruby/object:Gem::Version
|
|
49
|
+
version: '0.2'
|
|
50
|
+
- - "<"
|
|
51
|
+
- !ruby/object:Gem::Version
|
|
52
|
+
version: '1.0'
|
|
53
|
+
- !ruby/object:Gem::Dependency
|
|
54
|
+
name: csv
|
|
55
|
+
requirement: !ruby/object:Gem::Requirement
|
|
56
|
+
requirements:
|
|
57
|
+
- - ">="
|
|
58
|
+
- !ruby/object:Gem::Version
|
|
59
|
+
version: '3.0'
|
|
60
|
+
- - "<"
|
|
61
|
+
- !ruby/object:Gem::Version
|
|
62
|
+
version: '5.0'
|
|
63
|
+
type: :runtime
|
|
64
|
+
prerelease: false
|
|
65
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
66
|
+
requirements:
|
|
67
|
+
- - ">="
|
|
68
|
+
- !ruby/object:Gem::Version
|
|
69
|
+
version: '3.0'
|
|
70
|
+
- - "<"
|
|
71
|
+
- !ruby/object:Gem::Version
|
|
72
|
+
version: '5.0'
|
|
73
|
+
- !ruby/object:Gem::Dependency
|
|
74
|
+
name: rake
|
|
75
|
+
requirement: !ruby/object:Gem::Requirement
|
|
76
|
+
requirements:
|
|
77
|
+
- - "~>"
|
|
78
|
+
- !ruby/object:Gem::Version
|
|
79
|
+
version: '13.0'
|
|
80
|
+
type: :development
|
|
81
|
+
prerelease: false
|
|
82
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
83
|
+
requirements:
|
|
84
|
+
- - "~>"
|
|
85
|
+
- !ruby/object:Gem::Version
|
|
86
|
+
version: '13.0'
|
|
87
|
+
- !ruby/object:Gem::Dependency
|
|
88
|
+
name: minitest
|
|
89
|
+
requirement: !ruby/object:Gem::Requirement
|
|
90
|
+
requirements:
|
|
91
|
+
- - "~>"
|
|
92
|
+
- !ruby/object:Gem::Version
|
|
93
|
+
version: '5.0'
|
|
94
|
+
type: :development
|
|
95
|
+
prerelease: false
|
|
96
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
97
|
+
requirements:
|
|
98
|
+
- - "~>"
|
|
99
|
+
- !ruby/object:Gem::Version
|
|
100
|
+
version: '5.0'
|
|
101
|
+
description: Extracted reusable OpenAI API client that wraps chat completions with
|
|
102
|
+
sane defaults, logging hooks, and retry/backoff handling.
|
|
103
|
+
email:
|
|
104
|
+
- opensource@example.com
|
|
105
|
+
executables: []
|
|
106
|
+
extensions: []
|
|
107
|
+
extra_rdoc_files: []
|
|
108
|
+
files:
|
|
109
|
+
- LICENSE.txt
|
|
110
|
+
- README.md
|
|
111
|
+
- examples/chat.rb
|
|
112
|
+
- lib/base64.rb
|
|
113
|
+
- lib/bigdecimal.rb
|
|
114
|
+
- lib/light/openai.rb
|
|
115
|
+
- lib/light/openai/base64_fallback.rb
|
|
116
|
+
- lib/light/openai/bigdecimal_fallback.rb
|
|
117
|
+
- lib/light/openai/client.rb
|
|
118
|
+
- lib/light/openai/version.rb
|
|
119
|
+
- lib/openai_client.rb
|
|
120
|
+
homepage: https://github.com/gaetanjuvin/light-openai-lib
|
|
121
|
+
licenses:
|
|
122
|
+
- MIT
|
|
123
|
+
metadata:
|
|
124
|
+
homepage_uri: https://github.com/gaetanjuvin/light-openai-lib
|
|
125
|
+
source_code_uri: https://github.com/gaetanjuvin/light-openai-lib
|
|
126
|
+
changelog_uri: https://github.com/gaetanjuvin/light-openai-lib
|
|
127
|
+
bug_tracker_uri: https://github.com/gaetanjuvin/light-openai-lib/issues
|
|
128
|
+
post_install_message:
|
|
129
|
+
rdoc_options: []
|
|
130
|
+
require_paths:
|
|
131
|
+
- lib
|
|
132
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
133
|
+
requirements:
|
|
134
|
+
- - ">="
|
|
135
|
+
- !ruby/object:Gem::Version
|
|
136
|
+
version: '2.6'
|
|
137
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
138
|
+
requirements:
|
|
139
|
+
- - ">="
|
|
140
|
+
- !ruby/object:Gem::Version
|
|
141
|
+
version: '0'
|
|
142
|
+
requirements: []
|
|
143
|
+
rubygems_version: 3.0.3.1
|
|
144
|
+
signing_key:
|
|
145
|
+
specification_version: 4
|
|
146
|
+
summary: Minimal OpenAI chat completions client with retry logic
|
|
147
|
+
test_files: []
|