ruby-openai 7.4.0 → 8.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 636458da173c91f143246acd3a13d2092345cfe9c2ad8b0496b4b5520f215272
4
- data.tar.gz: 7705418143eccf24f1b4bc97abab406919ac7297e130bbdde498081ad3539b61
3
+ metadata.gz: db56b0b2e16d752cb827fcc80f47d6880187d4f3140c29d3519f9527539dcc29
4
+ data.tar.gz: dff6c57719e2cf0ae912b9815d58c9cb08c46ff3b1e7e71bf1c4952de605bbad
5
5
  SHA512:
6
- metadata.gz: a2f6e3b4b275283e526bec8e68743156cd80840309d66050b74d1ab493695fcd6099fa0383f8b462cb6369af2a6118a8d83dd899e4d8a6a29e2d801ea9ecddd1
7
- data.tar.gz: 9e26a6e2b5a0b6f8b05ad7bb5c0799fa3a85aa96e772c020bd33332bf476b45c40a8d3574c7b11d057889d39040efb9d87e80bfb5932bf604f19cdef8122b05b
6
+ metadata.gz: 6a2035500d97c58f4637ccfdc40cd09edb9545a77242ee4b02a6f53fb83b4c2dd616e2e8069265439067ab8f52d54c5316e42cf067b9096820ff67672914ce17
7
+ data.tar.gz: 1ba769f55b5ca75b48aadf906e4b75618e090681d4410f0e60da6d470d432901197106538912e17b24762587f956c8b5770c7e338083878d672596736bc5b891
data/.circleci/config.yml CHANGED
@@ -38,9 +38,9 @@ workflows:
38
38
  matrix:
39
39
  parameters:
40
40
  ruby-image:
41
- - cimg/ruby:2.6-node
42
41
  - cimg/ruby:2.7-node
43
42
  - cimg/ruby:3.0-node
44
43
  - cimg/ruby:3.1-node
45
44
  - cimg/ruby:3.2-node
46
45
  - cimg/ruby:3.3-node
46
+ - cimg/ruby:3.4-node
data/CHANGELOG.md CHANGED
@@ -5,7 +5,29 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
- ## [7.4.0] - 2024-02-10
8
+ ## [8.0.0] - 2025-03-14
9
+
10
+ ### Added
11
+
12
+ - Add Responses endpoints - thanks to my excellent colleague [@simonx1](https://github.com/simonx1) for your work on this!
13
+ - Add docs for the Deepseek chat API.
14
+ - Add Models#delete - thanks to [bennysghost](https://github.com/bennysghost).
15
+
16
+ ### Fixed
17
+
18
+ - [BREAKING] Try to JSON parse everything. If it fails, fall back gracefully to returning the raw response. Thank you to [@gregszero](https://github.com/gregszero) and the many others who raised this issue.
19
+ - [BREAKING] An unknown file type will no longer prevent file upload, but instead raise a warning.
20
+ - [BREAKING] ruby-openai no longer requires "faraday/multipart" for Faraday 1 users (Faraday 1 already includes it and it was causing a warning). Thanks to [ajGingrich](https://github.com/ajGingrich) for raising this!
21
+ - Add `user_data` and `evals` as options for known File types - thank you to [jontec](https://github.com/jontec) for this fix!
22
+ - Fix a syntax ambiguity in Client.rb - thank you to [viralpraxis](https://github.com/viralpraxis).
23
+
24
+ ### Removed
25
+
26
+ - [BREAKING] Backwards compatibility for `require "ruby/openai"` is removed - from v8 on you MUST use `require "openai"`. This fixes a deprecation warning with Ruby 3.4. Thanks to [@ndemianc](https://github.com/ndemianc) for this PR.
27
+ - [BREAKING] Removed support for Ruby 2.6. ruby-openai may still work with this version but it's no longer supported.
28
+ - Removed the 'OpenAI-Beta' header from Batches API requests.
29
+
30
+ ## [7.4.0] - 2025-02-10
9
31
 
10
32
  ### Added
11
33
 
data/Gemfile CHANGED
@@ -3,10 +3,11 @@ source "https://rubygems.org"
3
3
  # Include gem dependencies from ruby-openai.gemspec
4
4
  gemspec
5
5
 
6
+ # Development dependencies. Not included in the published gem.
6
7
  gem "byebug", "~> 11.1.3"
7
- gem "dotenv", "~> 2.8.1"
8
- gem "rake", "~> 13.2"
8
+ gem "dotenv", "~> 2.8.1" # >= v3 will require removing support for Ruby 2.7 from CI.
9
+ gem "rake", "~> 13.2.1"
9
10
  gem "rspec", "~> 3.13"
10
- gem "rubocop", "~> 1.50.2"
11
- gem "vcr", "~> 6.1.0"
12
- gem "webmock", "~> 3.24.0"
11
+ gem "rubocop", "~> 1.74.0"
12
+ gem "vcr", "~> 6.3.1"
13
+ gem "webmock", "~> 3.25.1"
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ruby-openai (7.4.0)
4
+ ruby-openai (8.0.0)
5
5
  event_stream_parser (>= 0.3.0, < 2.0.0)
6
6
  faraday (>= 1)
7
7
  faraday-multipart (>= 1)
@@ -13,7 +13,7 @@ GEM
13
13
  public_suffix (>= 2.0.2, < 7.0)
14
14
  ast (2.4.2)
15
15
  base64 (0.2.0)
16
- bigdecimal (3.1.8)
16
+ bigdecimal (3.1.9)
17
17
  byebug (11.1.3)
18
18
  crack (1.0.0)
19
19
  bigdecimal
@@ -28,16 +28,20 @@ GEM
28
28
  faraday-multipart (1.0.4)
29
29
  multipart-post (~> 2)
30
30
  faraday-net_http (3.0.2)
31
- hashdiff (1.1.1)
32
- json (2.6.3)
31
+ hashdiff (1.1.2)
32
+ json (2.10.2)
33
+ language_server-protocol (3.17.0.4)
34
+ lint_roller (1.1.0)
33
35
  multipart-post (2.3.0)
34
- parallel (1.22.1)
35
- parser (3.2.2.0)
36
+ parallel (1.26.3)
37
+ parser (3.3.7.1)
36
38
  ast (~> 2.4.1)
37
- public_suffix (5.1.1)
39
+ racc
40
+ public_suffix (6.0.1)
41
+ racc (1.8.1)
38
42
  rainbow (3.1.1)
39
43
  rake (13.2.1)
40
- regexp_parser (2.8.0)
44
+ regexp_parser (2.10.0)
41
45
  rexml (3.3.9)
42
46
  rspec (3.13.0)
43
47
  rspec-core (~> 3.13.0)
@@ -52,23 +56,27 @@ GEM
52
56
  diff-lcs (>= 1.2.0, < 2.0)
53
57
  rspec-support (~> 3.13.0)
54
58
  rspec-support (3.13.1)
55
- rubocop (1.50.2)
59
+ rubocop (1.74.0)
56
60
  json (~> 2.3)
61
+ language_server-protocol (~> 3.17.0.2)
62
+ lint_roller (~> 1.1.0)
57
63
  parallel (~> 1.10)
58
- parser (>= 3.2.0.0)
64
+ parser (>= 3.3.0.2)
59
65
  rainbow (>= 2.2.2, < 4.0)
60
- regexp_parser (>= 1.8, < 3.0)
61
- rexml (>= 3.2.5, < 4.0)
62
- rubocop-ast (>= 1.28.0, < 2.0)
66
+ regexp_parser (>= 2.9.3, < 3.0)
67
+ rubocop-ast (>= 1.38.0, < 2.0)
63
68
  ruby-progressbar (~> 1.7)
64
- unicode-display_width (>= 2.4.0, < 3.0)
65
- rubocop-ast (1.28.0)
66
- parser (>= 3.2.1.0)
69
+ unicode-display_width (>= 2.4.0, < 4.0)
70
+ rubocop-ast (1.38.1)
71
+ parser (>= 3.3.1.0)
67
72
  ruby-progressbar (1.13.0)
68
73
  ruby2_keywords (0.0.5)
69
- unicode-display_width (2.4.2)
70
- vcr (6.1.0)
71
- webmock (3.24.0)
74
+ unicode-display_width (3.1.4)
75
+ unicode-emoji (~> 4.0, >= 4.0.4)
76
+ unicode-emoji (4.0.4)
77
+ vcr (6.3.1)
78
+ base64
79
+ webmock (3.25.1)
72
80
  addressable (>= 2.8.0)
73
81
  crack (>= 0.3.2)
74
82
  hashdiff (>= 0.4.0, < 2.0.0)
@@ -79,12 +87,12 @@ PLATFORMS
79
87
  DEPENDENCIES
80
88
  byebug (~> 11.1.3)
81
89
  dotenv (~> 2.8.1)
82
- rake (~> 13.2)
90
+ rake (~> 13.2.1)
83
91
  rspec (~> 3.13)
84
- rubocop (~> 1.50.2)
92
+ rubocop (~> 1.74.0)
85
93
  ruby-openai!
86
- vcr (~> 6.1.0)
87
- webmock (~> 3.24.0)
94
+ vcr (~> 6.3.1)
95
+ webmock (~> 3.25.1)
88
96
 
89
97
  BUNDLED WITH
90
98
  2.4.5
data/README.md CHANGED
@@ -29,6 +29,7 @@ Stream text with GPT-4, transcribe and translate audio with Whisper, or create i
29
29
  - [Errors](#errors)
30
30
  - [Faraday middleware](#faraday-middleware)
31
31
  - [Azure](#azure)
32
+ - [Deepseek](#deepseek)
32
33
  - [Ollama](#ollama)
33
34
  - [Groq](#groq)
34
35
  - [Counting Tokens](#counting-tokens)
@@ -37,6 +38,7 @@ Stream text with GPT-4, transcribe and translate audio with Whisper, or create i
37
38
  - [Streaming Chat](#streaming-chat)
38
39
  - [Vision](#vision)
39
40
  - [JSON Mode](#json-mode)
41
+ - [Responses API](#responses-api)
40
42
  - [Functions](#functions)
41
43
  - [Completions](#completions)
42
44
  - [Embeddings](#embeddings)
@@ -228,6 +230,28 @@ end
228
230
 
229
231
  where `AZURE_OPENAI_URI` is e.g. `https://custom-domain.openai.azure.com/openai/deployments/gpt-35-turbo`
230
232
 
233
+ #### Deepseek
234
+
235
+ [Deepseek](https://api-docs.deepseek.com/) is compatible with the OpenAI chat API. Get an access token from [here](https://platform.deepseek.com/api_keys), then:
236
+
237
+ ```ruby
238
+ client = OpenAI::Client.new(
239
+ access_token: "deepseek_access_token_goes_here",
240
+ uri_base: "https://api.deepseek.com/"
241
+ )
242
+
243
+ client.chat(
244
+ parameters: {
245
+ model: "deepseek-chat", # Required.
246
+ messages: [{ role: "user", content: "Hello!"}], # Required.
247
+ temperature: 0.7,
248
+ stream: proc do |chunk, _bytesize|
249
+ print chunk.dig("choices", 0, "delta", "content")
250
+ end
251
+ }
252
+ )
253
+ ```
254
+
231
255
  #### Ollama
232
256
 
233
257
  Ollama allows you to run open-source LLMs, such as Llama 3, locally. It [offers chat compatibility](https://github.com/ollama/ollama/blob/main/docs/openai.md) with the OpenAI API.
@@ -304,6 +328,12 @@ client.models.list
304
328
  client.models.retrieve(id: "gpt-4o")
305
329
  ```
306
330
 
331
+ You can also delete any finetuned model you generated, if you're an account Owner on your OpenAI organization:
332
+
333
+ ```ruby
334
+ client.models.delete(id: "ft:gpt-4o-mini:acemeco:suffix:abc123")
335
+ ```
336
+
307
337
  ### Chat
308
338
 
309
339
  GPT is a model that can be used to generate text in a conversational style. You can use it to [generate a response](https://platform.openai.com/docs/api-reference/chat/create) to a sequence of [messages](https://platform.openai.com/docs/guides/chat/introduction):
@@ -441,6 +471,94 @@ You can stream it as well!
441
471
  # }
442
472
  ```
443
473
 
474
+ ### Responses API
475
+ [OpenAI's most advanced interface for generating model responses](https://platform.openai.com/docs/api-reference/responses). Supports text and image inputs, and text outputs. Create stateful interactions with the model, using the output of previous responses as input. Extend the model's capabilities with built-in tools for file search, web search, computer use, and more. Allow the model access to external systems and data using function calling.
476
+
477
+ #### Create a Response
478
+ ```ruby
479
+ response = client.responses.create(parameters: {
480
+ model: "gpt-4o",
481
+ input: "Hello! I'm Szymon!"
482
+ })
483
+ puts response.dig("output", 0, "content", 0, "text")
484
+ # => Hello Szymon! How can I assist you today?
485
+ ```
486
+
487
+ #### Follow-up Messages
488
+ ```ruby
489
+ followup = client.responses.create(parameters: {
490
+ model: "gpt-4o",
491
+ input: "Remind me, what is my name?",
492
+ previous_response_id: response["id"]
493
+ })
494
+ puts followup.dig("output", 0, "content", 0, "text")
495
+ # => Your name is Szymon! How can I help you today?
496
+ ```
497
+
498
+ #### Tool Calls
499
+ ```ruby
500
+ response = client.responses.create(parameters: {
501
+ model: "gpt-4o",
502
+ input: "What's the weather in Paris?",
503
+ tools: [
504
+ {
505
+ "type" => "function",
506
+ "name" => "get_current_weather",
507
+ "description" => "Get the current weather in a given location",
508
+ "parameters" => {
509
+ "type" => "object",
510
+ "properties" => {
511
+ "location" => {
512
+ "type" => "string",
513
+ "description" => "The geographic location to get the weather for"
514
+ }
515
+ },
516
+ "required" => ["location"]
517
+ }
518
+ }
519
+ ]
520
+ })
521
+ puts response.dig("output", 0, "name")
522
+ # => "get_current_weather"
523
+ ```
524
+
525
+ #### Streaming
526
+ ```ruby
527
+ client.responses.create(
528
+ parameters: {
529
+ model: "gpt-4o", # Required.
530
+ input: "Hello!", # Required.
531
+ stream: proc do |chunk, _bytesize|
532
+ if chunk["type"] == "response.output_text.delta"
533
+ print chunk["delta"]
534
+ $stdout.flush # Ensure output is displayed immediately
535
+ end
536
+ end
537
+ }
538
+ )
539
+ # => "Hi there! How can I assist you today?..."
540
+ ```
541
+
542
+ #### Retrieve a Response
543
+ ```ruby
544
+ retrieved_response = client.responses.retrieve(response_id: response["id"])
545
+ puts retrieved_response["object"]
546
+ # => "response"
547
+ ```
548
+
549
+ #### Delete a Response
550
+ ```ruby
551
+ deletion = client.responses.delete(response_id: response["id"])
552
+ puts deletion["deleted"]
553
+ # => true
554
+ ```
555
+
556
+ #### List Input Items
557
+ ```ruby
558
+ input_items = client.responses.input_items(response_id: response["id"])
559
+ puts input_items["object"] # => "list"
560
+ ```
561
+
444
562
  ### Functions
445
563
 
446
564
  You can describe and pass in functions and the model will intelligently choose to output a JSON object containing arguments to call them - eg., to use your method `get_current_weather` to get the weather in a given location. Note that tool_choice is optional, but if you exclude it, the model will choose whether to use the function or not ([see here](https://platform.openai.com/docs/api-reference/chat/create#chat-create-tool_choice)).
@@ -746,6 +864,12 @@ You can also capture the events for a job:
746
864
  client.finetunes.list_events(id: fine_tune_id)
747
865
  ```
748
866
 
867
+ You can also delete any finetuned model you generated, if you're an account Owner on your OpenAI organization:
868
+
869
+ ```ruby
870
+ client.models.delete(id: fine_tune_id)
871
+ ```
872
+
749
873
  ### Vector Stores
750
874
 
751
875
  Vector Store objects give the File Search tool the ability to search your files.
@@ -1544,6 +1668,12 @@ To run all tests, execute the command `bundle exec rake`, which will also run th
1544
1668
  > [!WARNING]
1545
1669
  > If you have an `OPENAI_ACCESS_TOKEN` and `OPENAI_ADMIN_TOKEN` in your `ENV`, running the specs will hit the actual API, which will be slow and cost you money - 2 cents or more! Remove them from your environment with `unset` or similar if you just want to run the specs against the stored VCR responses.
1546
1670
 
1671
+ ### To check for deprecations
1672
+
1673
+ ```
1674
+ bundle exec ruby -e "Warning[:deprecated] = true; require 'rspec'; exit RSpec::Core::Runner.run(['spec/openai/client/http_spec.rb:25'])"
1675
+ ```
1676
+
1547
1677
  ## Release
1548
1678
 
1549
1679
  First run the specs without VCR so they actually hit the API. This will cost 2 cents or more. Set OPENAI_ACCESS_TOKEN and OPENAI_ADMIN_TOKEN in your environment.
@@ -1,7 +1,7 @@
1
1
  module OpenAI
2
2
  class Batches
3
3
  def initialize(client:)
4
- @client = client.beta(assistants: OpenAI::Assistants::BETA_VERSION)
4
+ @client = client
5
5
  end
6
6
 
7
7
  def list(parameters: {})
data/lib/openai/client.rb CHANGED
@@ -5,7 +5,7 @@ module OpenAI
5
5
  SENSITIVE_ATTRIBUTES = %i[@access_token @admin_token @organization_id @extra_headers].freeze
6
6
  CONFIG_KEYS = %i[access_token admin_token api_type api_version extra_headers
7
7
  log_errors organization_id request_timeout uri_base].freeze
8
- attr_reader *CONFIG_KEYS, :faraday_middleware
8
+ attr_reader(*CONFIG_KEYS, :faraday_middleware)
9
9
  attr_writer :access_token
10
10
 
11
11
  def initialize(config = {}, &faraday_middleware)
@@ -52,6 +52,10 @@ module OpenAI
52
52
  @models ||= OpenAI::Models.new(client: self)
53
53
  end
54
54
 
55
+ def responses
56
+ @responses ||= OpenAI::Responses.new(client: self)
57
+ end
58
+
55
59
  def assistants
56
60
  @assistants ||= OpenAI::Assistants.new(client: self)
57
61
  end
data/lib/openai/files.rb CHANGED
@@ -5,6 +5,8 @@ module OpenAI
5
5
  batch
6
6
  fine-tune
7
7
  vision
8
+ user_data
9
+ evals
8
10
  ].freeze
9
11
 
10
12
  def initialize(client:)
@@ -18,9 +20,7 @@ module OpenAI
18
20
  def upload(parameters: {})
19
21
  file_input = parameters[:file]
20
22
  file = prepare_file_input(file_input: file_input)
21
-
22
23
  validate(file: file, purpose: parameters[:purpose], file_input: file_input)
23
-
24
24
  @client.multipart_post(
25
25
  path: "/files",
26
26
  parameters: parameters.merge(file: file)
@@ -55,8 +55,12 @@ module OpenAI
55
55
 
56
56
  def validate(file:, purpose:, file_input:)
57
57
  raise ArgumentError, "`file` is required" if file.nil?
58
+
58
59
  unless PURPOSES.include?(purpose)
59
- raise ArgumentError, "`purpose` must be one of `#{PURPOSES.join(',')}`"
60
+ filename = file_input.is_a?(String) ? File.basename(file_input) : "uploaded file"
61
+ message = "The purpose '#{purpose}' for file '#{filename}' is not in the known purpose "
62
+ message += "list: #{PURPOSES.join(', ')}."
63
+ OpenAI.log_message("Warning", message, :warn)
60
64
  end
61
65
 
62
66
  validate_jsonl(file: file) if file_input.is_a?(String) && file_input.end_with?(".jsonl")
data/lib/openai/http.rb CHANGED
@@ -7,47 +7,52 @@ module OpenAI
7
7
  include HTTPHeaders
8
8
 
9
9
  def get(path:, parameters: nil)
10
- parse_jsonl(conn.get(uri(path: path), parameters) do |req|
10
+ parse_json(conn.get(uri(path: path), parameters) do |req|
11
11
  req.headers = headers
12
12
  end&.body)
13
13
  end
14
14
 
15
15
  def post(path:)
16
- parse_jsonl(conn.post(uri(path: path)) do |req|
16
+ parse_json(conn.post(uri(path: path)) do |req|
17
17
  req.headers = headers
18
18
  end&.body)
19
19
  end
20
20
 
21
21
  def json_post(path:, parameters:, query_parameters: {})
22
- conn.post(uri(path: path)) do |req|
22
+ parse_json(conn.post(uri(path: path)) do |req|
23
23
  configure_json_post_request(req, parameters)
24
24
  req.params = req.params.merge(query_parameters)
25
- end&.body
25
+ end&.body)
26
26
  end
27
27
 
28
28
  def multipart_post(path:, parameters: nil)
29
- conn(multipart: true).post(uri(path: path)) do |req|
29
+ parse_json(conn(multipart: true).post(uri(path: path)) do |req|
30
30
  req.headers = headers.merge({ "Content-Type" => "multipart/form-data" })
31
31
  req.body = multipart_parameters(parameters)
32
- end&.body
32
+ end&.body)
33
33
  end
34
34
 
35
35
  def delete(path:)
36
- conn.delete(uri(path: path)) do |req|
36
+ parse_json(conn.delete(uri(path: path)) do |req|
37
37
  req.headers = headers
38
- end&.body
38
+ end&.body)
39
39
  end
40
40
 
41
41
  private
42
42
 
43
- def parse_jsonl(response)
43
+ def parse_json(response)
44
44
  return unless response
45
45
  return response unless response.is_a?(String)
46
46
 
47
- # Convert a multiline string of JSON objects to a JSON array.
48
- response = response.gsub("}\n{", "},{").prepend("[").concat("]")
47
+ original_response = response.dup
48
+ if response.include?("}\n{")
49
+ # Attempt to convert what looks like a multiline string of JSON objects to a JSON array.
50
+ response = response.gsub("}\n{", "},{").prepend("[").concat("]")
51
+ end
49
52
 
50
53
  JSON.parse(response)
54
+ rescue JSON::ParserError
55
+ original_response
51
56
  end
52
57
 
53
58
  # Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
data/lib/openai/models.rb CHANGED
@@ -11,5 +11,9 @@ module OpenAI
11
11
  def retrieve(id:)
12
12
  @client.get(path: "/models/#{id}")
13
13
  end
14
+
15
+ def delete(id:)
16
+ @client.delete(path: "/models/#{id}")
17
+ end
14
18
  end
15
19
  end
@@ -0,0 +1,23 @@
1
+ module OpenAI
2
+ class Responses
3
+ def initialize(client:)
4
+ @client = client
5
+ end
6
+
7
+ def create(parameters: {})
8
+ @client.json_post(path: "/responses", parameters: parameters)
9
+ end
10
+
11
+ def retrieve(response_id:)
12
+ @client.get(path: "/responses/#{response_id}")
13
+ end
14
+
15
+ def delete(response_id:)
16
+ @client.delete(path: "/responses/#{response_id}")
17
+ end
18
+
19
+ def input_items(response_id:, parameters: {})
20
+ @client.get(path: "/responses/#{response_id}/input_items", parameters: parameters)
21
+ end
22
+ end
23
+ end
@@ -1,3 +1,3 @@
1
1
  module OpenAI
2
- VERSION = "7.4.0".freeze
2
+ VERSION = "8.0.0".freeze
3
3
  end
data/lib/openai.rb CHANGED
@@ -1,12 +1,12 @@
1
1
  require "faraday"
2
- require "faraday/multipart"
3
-
2
+ require "faraday/multipart" if Gem::Version.new(Faraday::VERSION) >= Gem::Version.new("2.0")
4
3
  require_relative "openai/http"
5
4
  require_relative "openai/client"
6
5
  require_relative "openai/files"
7
6
  require_relative "openai/finetunes"
8
7
  require_relative "openai/images"
9
8
  require_relative "openai/models"
9
+ require_relative "openai/responses"
10
10
  require_relative "openai/assistants"
11
11
  require_relative "openai/threads"
12
12
  require_relative "openai/messages"
@@ -31,12 +31,7 @@ module OpenAI
31
31
  rescue Faraday::Error => e
32
32
  raise e unless e.response.is_a?(Hash)
33
33
 
34
- logger = Logger.new($stdout)
35
- logger.formatter = proc do |_severity, _datetime, _progname, msg|
36
- "\033[31mOpenAI HTTP Error (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
37
- end
38
- logger.error(e.response[:body])
39
-
34
+ OpenAI.log_message("OpenAI HTTP Error", e.response[:body], :error)
40
35
  raise e
41
36
  end
42
37
  end
@@ -72,25 +67,38 @@ module OpenAI
72
67
 
73
68
  class << self
74
69
  attr_writer :configuration
75
- end
76
70
 
77
- def self.configuration
78
- @configuration ||= OpenAI::Configuration.new
79
- end
71
+ def configuration
72
+ @configuration ||= OpenAI::Configuration.new
73
+ end
80
74
 
81
- def self.configure
82
- yield(configuration)
83
- end
75
+ def configure
76
+ yield(configuration)
77
+ end
84
78
 
85
- # Estimate the number of tokens in a string, using the rules of thumb from OpenAI:
86
- # https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
87
- def self.rough_token_count(content = "")
88
- raise ArgumentError, "rough_token_count requires a string" unless content.is_a? String
89
- return 0 if content.empty?
79
+ # Estimate the number of tokens in a string, using the rules of thumb from OpenAI:
80
+ # https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
81
+ def rough_token_count(content = "")
82
+ raise ArgumentError, "rough_token_count requires a string" unless content.is_a? String
83
+ return 0 if content.empty?
84
+
85
+ count_by_chars = content.size / 4.0
86
+ count_by_words = content.split.size * 4.0 / 3
87
+ estimate = ((count_by_chars + count_by_words) / 2.0).round
88
+ [1, estimate].max
89
+ end
90
90
 
91
- count_by_chars = content.size / 4.0
92
- count_by_words = content.split.size * 4.0 / 3
93
- estimate = ((count_by_chars + count_by_words) / 2.0).round
94
- [1, estimate].max
91
+ # Log a message with appropriate formatting
92
+ # @param prefix [String] Prefix to add to the message
93
+ # @param message [String] The message to log
94
+ # @param level [Symbol] The log level (:error, :warn, etc.)
95
+ def log_message(prefix, message, level = :warn)
96
+ color = level == :error ? "\033[31m" : "\033[33m"
97
+ logger = Logger.new($stdout)
98
+ logger.formatter = proc do |_severity, _datetime, _progname, msg|
99
+ "#{color}#{prefix} (spotted in ruby-openai #{VERSION}): #{msg}\n\033[0m"
100
+ end
101
+ logger.send(level, message)
102
+ end
95
103
  end
96
104
  end
data/lib/ruby/openai.rb CHANGED
@@ -1,2 +1 @@
1
1
  require_relative "../openai"
2
- require_relative "../openai/compatibility"
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ruby-openai
3
3
  version: !ruby/object:Gem::Version
4
- version: 7.4.0
4
+ version: 8.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Alex
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2025-02-10 00:00:00.000000000 Z
11
+ date: 2025-03-14 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: event_stream_parser
@@ -91,7 +91,6 @@ files:
91
91
  - lib/openai/audio.rb
92
92
  - lib/openai/batches.rb
93
93
  - lib/openai/client.rb
94
- - lib/openai/compatibility.rb
95
94
  - lib/openai/files.rb
96
95
  - lib/openai/finetunes.rb
97
96
  - lib/openai/http.rb
@@ -99,6 +98,7 @@ files:
99
98
  - lib/openai/images.rb
100
99
  - lib/openai/messages.rb
101
100
  - lib/openai/models.rb
101
+ - lib/openai/responses.rb
102
102
  - lib/openai/run_steps.rb
103
103
  - lib/openai/runs.rb
104
104
  - lib/openai/threads.rb
@@ -1,11 +0,0 @@
1
- module Ruby
2
- module OpenAI
3
- VERSION = ::OpenAI::VERSION
4
-
5
- Error = ::OpenAI::Error
6
- AuthenticationError = ::OpenAI::AuthenticationError
7
- ConfigurationError = ::OpenAI::ConfigurationError
8
- Configuration = ::OpenAI::Configuration
9
- MiddlewareErrors = ::OpenAI::MiddlewareErrors
10
- end
11
- end