llm_gateway 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries, and is provided for informational purposes only.
data/lib/llm_gateway/adapters/open_ai/chat_completions/input_mapper.rb ADDED
@@ -0,0 +1,110 @@
+ # frozen_string_literal: true
+
+ require "base64"
+ require_relative "bidirectional_message_mapper"
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       module ChatCompletions
+         class InputMapper
+           def self.map(data)
+             {
+               messages: map_messages(data[:messages]),
+               response_format: map_response_format(data[:response_format]),
+               tools: map_tools(data[:tools]),
+               system: map_system(data[:system])
+             }
+           end
+
+           private
+
+           def self.map_response_format(response_format)
+             response_format
+           end
+
+           def self.map_messages(messages)
+             return messages unless messages
+
+             message_mapper = BidirectionalMessageMapper.new(LlmGateway::DIRECTION_IN)
+
+             # First map messages into Claude-style content blocks
+             mapped_messages = messages.map do |msg|
+               msg = msg.merge(role: "user") if msg[:role] == "developer"
+
+               content = if msg[:content].is_a?(Array)
+                 msg[:content].map do |content|
+                   message_mapper.map_content(content)
+                 end
+               else
+                 [ message_mapper.map_content(msg[:content]) ]
+               end
+
+               {
+                 role: msg[:role],
+                 content: content
+               }
+             end
+             # Then transform to OpenAI format
+             mapped_messages.flat_map do |msg|
+               # Split array content into tool calls, tool results, and regular content
+               tool_calls = []
+               regular_content = []
+               tool_messages = []
+               msg[:content].each do |content|
+                 case content[:type] || content[:role]
+                 when "tool"
+                   tool_messages << content
+                 when "function"
+                   tool_calls << content
+                 else
+                   regular_content << content
+                 end
+               end
+               result = []
+
+               # Add the main message, with tool calls if any
+               if tool_calls.any? || regular_content.any?
+                 main_msg = msg.dup
+                 main_msg[:role] = "assistant" if !main_msg[:role]
+                 main_msg[:tool_calls] = tool_calls if tool_calls.any?
+                 main_msg[:content] = regular_content.any? ? regular_content : nil
+                 result << main_msg
+               end
+
+               # Add separate tool result messages
+               result += tool_messages
+
+               result
+             end
+           end
+
+           def self.map_tools(tools)
+             return tools unless tools
+
+             tools.map do |tool|
+               {
+                 type: "function",
+                 function: {
+                   name: tool[:name],
+                   description: tool[:description],
+                   parameters: tool[:input_schema]
+                 }
+               }
+             end
+           end
+
+           def self.map_system(system)
+             if !system || system.empty?
+               []
+             else
+               system.map do |msg|
+                 msg[:role] == "system" ? msg.merge(role: "developer") : msg
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
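The tool translation in `map_tools` is a pure reshaping: a Claude-style tool definition becomes OpenAI's nested `function` wrapper. A minimal sketch with an invented `get_weather` tool; only the hash shapes come from the diff:

```ruby
claude_tool = {
  name: "get_weather",
  description: "Look up the current weather",
  input_schema: { type: "object", properties: { city: { type: "string" } } }
}

# What map_tools emits for OpenAI Chat Completions:
openai_tool = {
  type: "function",
  function: {
    name: claude_tool[:name],
    description: claude_tool[:description],
    parameters: claude_tool[:input_schema]
  }
}
```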
data/lib/llm_gateway/adapters/open_ai/chat_completions/output_mapper.rb ADDED
@@ -0,0 +1,40 @@
+ # frozen_string_literal: true
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       module ChatCompletions
+         class OutputMapper
+           def self.map(data)
+             {
+               id: data[:id],
+               model: data[:model],
+               usage: data[:usage],
+               choices: map_choices(data[:choices])
+             }
+           end
+
+           private
+
+           def self.map_choices(choices)
+             return [] unless choices
+             message_mapper = BidirectionalMessageMapper.new(LlmGateway::DIRECTION_OUT)
+
+             choices.map do |choice|
+               message = choice[:message] || {}
+               content_item = message_mapper.map_content(message[:content])
+               tool_calls = message[:tool_calls] ? message[:tool_calls].map { |tool_call| message_mapper.map_content(tool_call) } : []
+
+               # Only include content_item if it has actual text content
+               content_array = []
+               content_array << content_item if LlmGateway::Utils.present?(content_item[:text])
+               content_array += tool_calls
+
+               { role: message[:role], content: content_array }
+             end
+           end
+         end
+       end
+     end
+   end
+ end
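For orientation, `OutputMapper` reduces a raw Chat Completions payload to the gateway's provider-neutral envelope. A sketch of the resulting shape, with invented values and assuming the bidirectional mapper returns plain text blocks:

```ruby
normalized = {
  id: "chatcmpl-123",                                   # invented example id
  model: "gpt-4o",
  usage: { prompt_tokens: 12, completion_tokens: 5 },
  choices: [
    { role: "assistant", content: [ { type: "text", text: "Hi there!" } ] }
  ]
}
```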
data/lib/llm_gateway/adapters/open_ai/client.rb CHANGED
@@ -22,6 +22,23 @@ module LlmGateway
          post("chat/completions", body)
        end
 
+       def responses(messages, response_format: { type: "text" }, tools: nil, system: [], max_completion_tokens: 4096)
+         body = {
+           model: model_key,
+           max_output_tokens: max_completion_tokens,
+           input: messages.flatten
+         }
+         body[:instructions] = system[0][:content] if system.any?
+         body[:tools] = tools if tools
+         post("responses", body)
+       end
+
+       def download_file(file_id)
+         get("files/#{file_id}/content")
+       end
+
        def generate_embeddings(input)
          body = {
            input:,
@@ -30,6 +47,10 @@ module LlmGateway
          post("embeddings", body)
        end
 
+       def upload_file(filename, content, mime_type = "application/octet-stream", purpose: "user_data")
+         post_file("files", content, filename, purpose: purpose, mime_type: mime_type)
+       end
+
        private
 
        def build_headers
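A hypothetical direct use of the adapter client's new endpoints. The model name, API key, and file are placeholders, and `LlmGateway::Client.responses` normally handles the message mapping; this assumes parsed responses are symbol-keyed, as elsewhere in the gem:

```ruby
client = LlmGateway::Adapters::OpenAi::Client.new(
  model_key: "gpt-4o",
  api_key: ENV["OPENAI_API_KEY"]
)

# Multipart upload; defaults to purpose "user_data" and a generic MIME type.
file = client.upload_file("report.pdf", File.binread("report.pdf"), "application/pdf")

# Responses API call; `system` becomes the top-level `instructions` field.
result = client.responses(
  [ { role: "user", content: [ { type: "input_text", text: "Summarize the report" } ] } ],
  system: [ { role: "developer", content: "Be concise" } ]
)

raw = client.download_file(file[:id])
```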
data/lib/llm_gateway/adapters/open_ai/file_output_mapper.rb ADDED
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       class FileOutputMapper
+         def self.map(data)
+           bytes = data.delete(:bytes)
+           data.delete(:object) # Didn't see much value in this; its only value is "file"
+           data.delete(:status) # Deprecated, so no need to pull through
+           data.delete(:status_details) # Deprecated, so no need to pull through
+           created_at = data.delete(:created_at)
+           time = Time.at(created_at, in: "UTC")
+           iso_format = time.iso8601(6)
+           data.merge(
+             size_bytes: bytes,
+             downloadable: data[:purpose] != "user_data",
+             mime_type: nil,
+             created_at: iso_format # Claude API format; easier for humans to read, so kept it this way
+           )
+         end
+       end
+     end
+   end
+ end
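The timestamp handling is the only non-trivial step: OpenAI returns an integer epoch, which the mapper rewrites in the ISO 8601 style the Claude API uses. For example (arbitrary epoch value):

```ruby
require "time"

# Integer epoch seconds in, microsecond-precision ISO 8601 string out.
Time.at(1_700_000_000, in: "UTC").iso8601(6)
# => "2023-11-14T22:13:20.000000Z"
```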
data/lib/llm_gateway/adapters/open_ai/responses/bidirectional_message_mapper.rb ADDED
@@ -0,0 +1,72 @@
+ # frozen_string_literal: true
+
+ require "base64"
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       module Responses
+         class BidirectionalMessageMapper < OpenAi::ChatCompletions::BidirectionalMessageMapper
+           def map_content(content)
+             # Convert string content to text format
+             content = { type: "text", text: content } unless content.is_a?(Hash)
+             case content[:type]
+             when "text"
+               map_text_content(content)
+             when "message"
+               map_messages(content)
+             when "output_text"
+               map_output_text_content(content)
+             when "tool_use", "function_call"
+               map_tool_use_content(content)
+             when "tool_result"
+               map_tool_result_content(content)
+             else
+               content
+             end
+           end
+
+           private
+
+           def map_messages(message)
+             message[:content].map { |content| map_content(content) }
+           end
+
+           def map_tool_result_content(content)
+             {
+               type: "function_call_output",
+               call_id: content[:tool_use_id],
+               output: content[:content]
+             }
+           end
+
+           def map_tool_use_content(content)
+             if direction == LlmGateway::DIRECTION_OUT
+               { id: content[:call_id], type: "tool_use", name: content[:name], input: parse_tool_arguments(content[:arguments]) }
+             else
+               { id: content[:id] }
+             end
+           end
+
+           def map_output_text_content(content)
+             {
+               type: "text",
+               text: content[:text]
+             }
+           end
+
+           def map_text_content(content)
+             {
+               type: "input_text",
+               text: content[:text]
+             }
+           end
+         end
+       end
+     end
+   end
+ end
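The inbound direction mostly renames fields; for instance, a Claude-style `tool_result` block becomes the Responses API's `function_call_output` item. A sketch with invented ids:

```ruby
tool_result = { type: "tool_result", tool_use_id: "call_abc123", content: "22°C, sunny" }

# What map_tool_result_content emits:
function_call_output = {
  type: "function_call_output",
  call_id: tool_result[:tool_use_id],
  output: tool_result[:content]
}
```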
data/lib/llm_gateway/adapters/open_ai/responses/input_mapper.rb ADDED
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ require "base64"
+ require_relative "bidirectional_message_mapper"
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       module Responses
+         class InputMapper < OpenAi::ChatCompletions::InputMapper
+           def self.message_mapper
+             BidirectionalMessageMapper.new(LlmGateway::DIRECTION_IN)
+           end
+
+           def self.map_tools(tools)
+             return tools unless tools
+
+             tools.map do |tool|
+               {
+                 type: "function",
+                 name: tool[:name],
+                 description: tool[:description],
+                 parameters: tool[:input_schema]
+               }
+             end
+           end
+
+           def self.map_messages(messages)
+             return messages unless messages
+             mapper = message_mapper
+
+             # First map messages into Claude-style content blocks
+             messages.map do |msg|
+               if msg[:id]
+                 msg = msg.merge(role: "assistant")
+                 msg.slice(:id)
+               else
+                 content = if msg[:content].is_a?(Array)
+                   msg[:content].map do |content|
+                     mapper.map_content(content)
+                   end
+                 elsif msg[:id]
+                   mapper.map_content(msg)
+                 else
+                   [ mapper.map_content(msg[:content]) ]
+                 end
+                 if msg.dig(:content).is_a?(Array) && msg.dig(:content, 0, :type) == "tool_result"
+                   content
+                 else
+                   {
+                     role: msg[:role],
+                     content: content
+                   }
+                 end
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
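Note the `map_tools` override: unlike Chat Completions, the Responses API expects the function fields at the top level of the tool hash rather than nested under `function`. Side by side, with a made-up schema:

```ruby
# Chat Completions (nested wrapper):
chat_completions_tool = {
  type: "function",
  function: { name: "get_weather", description: "Look up weather", parameters: { type: "object" } }
}

# Responses (flat), as emitted by this mapper:
responses_tool = {
  type: "function",
  name: "get_weather",
  description: "Look up weather",
  parameters: { type: "object" }
}
```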
data/lib/llm_gateway/adapters/open_ai/responses/output_mapper.rb ADDED
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ require "base64"
+ require_relative "bidirectional_message_mapper"
+
+ module LlmGateway
+   module Adapters
+     module OpenAi
+       module Responses
+         class OutputMapper
+           def self.map(data)
+             {
+               id: data[:id],
+               model: data[:model],
+               usage: data[:usage],
+               choices: map_choices(data[:output])
+             }
+           end
+
+           private
+
+           def self.map_choices(choices)
+             return [] unless choices
+             message_mapper = BidirectionalMessageMapper.new(LlmGateway::DIRECTION_OUT)
+             choices.map do |choice|
+               if choice[:id].start_with?("fc_")
+                 {
+                   id: choice[:id],
+                   role: choice[:role] || "assistant", # function calls apparently carry no role
+                   content: [ message_mapper.map_content(choice) ].flatten
+                 }
+               else
+                 content = message_mapper.map_content(choice)
+                 id = content.delete(:id)
+                 {
+                   id: choice[:id] || id,
+                   role: choice[:role],
+                   content: [ content ].flatten
+                 }
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
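Output items whose id starts with `"fc_"` are function calls; the mapper turns them into `tool_use` content blocks. A sketch with invented ids, assuming the inherited `parse_tool_arguments` JSON-decodes the arguments string:

```ruby
choice = {
  id: "fc_123",
  type: "function_call",
  call_id: "call_456",
  name: "get_weather",
  arguments: "{\"city\":\"Berlin\"}"
}

# Expected normalized choice:
# { id: "fc_123",
#   role: "assistant",
#   content: [ { id: "call_456", type: "tool_use", name: "get_weather", input: { "city" => "Berlin" } } ] }
```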
data/lib/llm_gateway/base_client.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
 
  require "net/http"
+ require "stringio"
  require "json"
 
  module LlmGateway
@@ -19,6 +20,40 @@ module LlmGateway
        process_response(response)
      end
 
+     def post_file(url_part, file_contents, filename, purpose: nil, mime_type: "application/octet-stream")
+       endpoint = "#{base_endpoint}/#{url_part.sub(%r{^/}, "")}"
+       uri = URI.parse(endpoint)
+
+       file_io = StringIO.new(file_contents)
+
+       # Create request with full URI (important!)
+       request = Net::HTTP::Post.new(uri)
+
+       form_data = [
+         [
+           "file",
+           file_io,
+           { filename: filename, "Content-Type" => mime_type }
+         ]
+       ]
+
+       # Add purpose parameter if provided
+       form_data << [ "purpose", purpose ] if purpose
+
+       request.set_form(form_data, "multipart/form-data")
+
+       # Headers (excluding Content-Type because set_form already sets it)
+       multipart_headers = build_headers.reject { |k, _| k.downcase == "content-type" }
+       multipart_headers.each { |key, value| request[key] = value }
+
+       response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") do |http|
+         http.request(request)
+       end
+
+       process_response(response)
+     end
+
      def post(url_part, body = nil, extra_headers = {})
        endpoint = "#{base_endpoint}/#{url_part.sub(%r{^/}, "")}"
        response = make_request(endpoint, Net::HTTP::Post, body, extra_headers)
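The upload path leans on Net::HTTP's built-in multipart support: `set_form` builds the body and sets the boundary `Content-Type` header, which is why `post_file` strips any `Content-Type` from the shared headers. A standalone sketch of the same pattern, with a placeholder endpoint and token:

```ruby
require "net/http"
require "stringio"
require "uri"

uri = URI.parse("https://api.openai.com/v1/files") # endpoint for illustration only
request = Net::HTTP::Post.new(uri)

# IO values make set_form emit multipart parts; the options hash mirrors
# the { filename:, "Content-Type" => } form used by post_file above.
request.set_form(
  [
    [ "file", StringIO.new("hello"), { filename: "hello.txt", "Content-Type" => "text/plain" } ],
    [ "purpose", "user_data" ]
  ],
  "multipart/form-data"
)
request["Authorization"] = "Bearer #{ENV["OPENAI_API_KEY"]}"

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") do |http|
  http.request(request)
end
```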
data/lib/llm_gateway/client.rb CHANGED
@@ -2,11 +2,45 @@
 
  module LlmGateway
    class Client
+     def self.provider_configs
+       @provider_configs ||= {
+         anthropic: {
+           input_mapper: LlmGateway::Adapters::Claude::InputMapper,
+           output_mapper: LlmGateway::Adapters::Claude::OutputMapper,
+           client: LlmGateway::Adapters::Claude::Client,
+           file_output_mapper: LlmGateway::Adapters::Claude::FileOutputMapper
+         },
+         openai: {
+           input_mapper: LlmGateway::Adapters::OpenAi::ChatCompletions::InputMapper,
+           output_mapper: LlmGateway::Adapters::OpenAi::ChatCompletions::OutputMapper,
+           client: LlmGateway::Adapters::OpenAi::Client,
+           file_output_mapper: LlmGateway::Adapters::OpenAi::FileOutputMapper
+         },
+         openai_responses: {
+           input_mapper: LlmGateway::Adapters::OpenAi::Responses::InputMapper,
+           output_mapper: LlmGateway::Adapters::OpenAi::Responses::OutputMapper,
+           client: LlmGateway::Adapters::OpenAi::Client,
+           file_output_mapper: LlmGateway::Adapters::OpenAi::FileOutputMapper
+         },
+         groq: {
+           input_mapper: LlmGateway::Adapters::Groq::InputMapper,
+           output_mapper: LlmGateway::Adapters::Groq::OutputMapper,
+           client: LlmGateway::Adapters::Groq::Client,
+           file_output_mapper: nil
+         }
+       }.freeze
+     end
+
+     def self.get_provider_config(provider_id)
+       provider_configs[provider_id.to_sym] || raise(LlmGateway::Errors::UnsupportedProvider, provider_id)
+     end
+
      def self.chat(model, message, response_format: "text", tools: nil, system: nil, api_key: nil)
-       client_klass = client_class(model)
+       provider = provider_from_model(model)
+       config = get_provider_config(provider)
        client_options = { model_key: model }
        client_options[:api_key] = api_key if api_key
-       client = client_klass.new(**client_options)
+       client = config[:client].new(**client_options)
 
        input_mapper = input_mapper_for_client(client)
        normalized_input = input_mapper.map({
@@ -24,28 +58,90 @@ module LlmGateway
        result_mapper(client).map(result)
      end
 
-     def self.client_class(model)
-       return LlmGateway::Adapters::Claude::Client if model.start_with?("claude")
-       return LlmGateway::Adapters::Groq::Client if model.start_with?("llama")
-       return LlmGateway::Adapters::OpenAi::Client if model.start_with?("gpt") ||
-                                                      model.start_with?("o4-") ||
-                                                      model.start_with?("openai")
+
+     def self.responses(model, message, response_format: "text", tools: nil, system: nil, api_key: nil)
+       provider = provider_from_model(model)
+       config = provider == "openai" ? get_provider_config("openai_responses") : get_provider_config(provider)
+       client_options = { model_key: model }
+       client_options[:api_key] = api_key if api_key
+       client = config[:client].new(**client_options)
+       input_mapper = config[:input_mapper]
+       normalized_input = input_mapper.map({
+         messages: normalize_messages(message),
+         response_format: normalize_response_format(response_format),
+         tools: tools,
+         system: normalize_system(system)
+       })
+       method = provider == "openai" ? "responses" : "chat"
+       result = client.send(method,
+         normalized_input[:messages],
+         response_format: normalized_input[:response_format],
+         tools: normalized_input[:tools],
+         system: normalized_input[:system]
+       )
+       config[:output_mapper].map(result)
+     end
+
+     def self.build_client(provider, api_key:, model: "none")
+       config = get_provider_config(provider)
+       client_options = { model_key: model }
+       client_options[:api_key] = api_key if api_key
+       config[:client].new(**client_options)
+     end
+
+     def self.upload_file(provider, **kwargs)
+       api_key = kwargs.delete(:api_key)
+       client = build_client(provider, api_key: api_key)
+       result = client.upload_file(*kwargs.values)
+       config = get_provider_config(provider)
+       config[:file_output_mapper].map(result)
+     end
+
+     def self.download_file(provider, **kwargs)
+       api_key = kwargs.delete(:api_key)
+       client = build_client(provider, api_key: api_key)
+       result = client.download_file(*kwargs.values)
+       config = get_provider_config(provider)
+       config[:file_output_mapper].map(result)
+     end
+
+     def self.provider_from_model(model)
+       return "anthropic" if model.start_with?("claude")
+       return "groq" if model.start_with?("llama")
+       return "openai" if model.start_with?("gpt") ||
+                          model.start_with?("o4-") ||
+                          model.start_with?("openai")
 
        raise LlmGateway::Errors::UnsupportedModel, model
      end
 
-     def self.input_mapper_for_client(client)
-       return LlmGateway::Adapters::Claude::InputMapper if client.is_a?(LlmGateway::Adapters::Claude::Client)
-       return LlmGateway::Adapters::OpenAi::InputMapper if client.is_a?(LlmGateway::Adapters::OpenAi::Client)
 
-       LlmGateway::Adapters::Groq::InputMapper if client.is_a?(LlmGateway::Adapters::Groq::Client)
+     def self.input_mapper_for_client(client)
+       config = get_provider_config_by_client(client)
+       config[:input_mapper]
      end
 
      def self.result_mapper(client)
-       return LlmGateway::Adapters::Claude::OutputMapper if client.is_a?(LlmGateway::Adapters::Claude::Client)
-       return LlmGateway::Adapters::OpenAi::OutputMapper if client.is_a?(LlmGateway::Adapters::OpenAi::Client)
+       config = get_provider_config_by_client(client)
+       config[:output_mapper]
+     end
+
+     def self.provider_id_from_client(client)
+       case client
+       when LlmGateway::Adapters::Claude::Client
+         "anthropic"
+       when LlmGateway::Adapters::OpenAi::Client
+         "openai"
+       when LlmGateway::Adapters::Groq::Client
+         "groq"
+       else
+         raise LlmGateway::Errors::UnsupportedProvider, client.class.name
+       end
+     end
 
-       LlmGateway::Adapters::Groq::OutputMapper if client.is_a?(LlmGateway::Adapters::Groq::Client)
+     def self.get_provider_config_by_client(client)
+       provider_id = provider_id_from_client(client)
+       get_provider_config(provider_id)
      end
 
      def self.normalize_system(system)
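Hypothetical call sites for the new class-level API (model names and keys are placeholders). Note that `upload_file` and `download_file` splat the remaining keyword arguments positionally, so keyword order must match the client method's parameter order:

```ruby
LlmGateway::Client.chat("gpt-4o", "Hello!", api_key: ENV["OPENAI_API_KEY"])

# Routes OpenAI models through the Responses API; other providers fall back to chat.
LlmGateway::Client.responses("gpt-4o", "Hello!", api_key: ENV["OPENAI_API_KEY"])

# kwargs.values => [filename, content], matching upload_file(filename, content, ...)
LlmGateway::Client.upload_file(
  :openai,
  filename: "notes.txt",
  content: "hello",
  api_key: ENV["OPENAI_API_KEY"]
)
```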
data/lib/llm_gateway/errors.rb CHANGED
@@ -28,6 +28,8 @@ module LlmGateway
      class UnknownError < ClientError; end
      class PromptTooLong < BadRequestError; end
      class UnsupportedModel < ClientError; end
+     class UnsupportedProvider < ClientError; end
+     class MissingMapperForProvider < ClientError; end
 
      class PromptError < BaseError; end
 
data/lib/llm_gateway/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module LlmGateway
-   VERSION = "0.2.0"
+   VERSION = "0.3.0"
  end
data/lib/llm_gateway.rb CHANGED
@@ -12,13 +12,21 @@ require_relative "llm_gateway/tool"
  require_relative "llm_gateway/adapters/claude/client"
  require_relative "llm_gateway/adapters/claude/input_mapper"
  require_relative "llm_gateway/adapters/claude/output_mapper"
+ require_relative "llm_gateway/adapters/open_ai/client"
+ require_relative "llm_gateway/adapters/open_ai/file_output_mapper"
+ require_relative "llm_gateway/adapters/open_ai/chat_completions/input_mapper"
+ require_relative "llm_gateway/adapters/open_ai/chat_completions/output_mapper"
  require_relative "llm_gateway/adapters/groq/client"
  require_relative "llm_gateway/adapters/groq/input_mapper"
  require_relative "llm_gateway/adapters/groq/output_mapper"
- require_relative "llm_gateway/adapters/open_ai/client"
- require_relative "llm_gateway/adapters/open_ai/input_mapper"
- require_relative "llm_gateway/adapters/open_ai/output_mapper"
+ require_relative "llm_gateway/adapters/open_ai/file_output_mapper"
+ require_relative "llm_gateway/adapters/open_ai/responses/input_mapper"
+ require_relative "llm_gateway/adapters/open_ai/responses/output_mapper"
 
  module LlmGateway
    class Error < StandardError; end
+
+   # Direction constants for message mappers
+   DIRECTION_IN = :in
+   DIRECTION_OUT = :out
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: llm_gateway
  version: !ruby/object:Gem::Version
-   version: 0.2.0
+   version: 0.3.0
  platform: ruby
  authors:
  - billybonks
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2025-08-08 00:00:00.000000000 Z
+ date: 2025-08-19 00:00:00.000000000 Z
  dependencies: []
  description: LlmGateway provides a consistent Ruby interface for multiple LLM providers
    including Claude, OpenAI, and Groq. Features include unified response formatting,
@@ -26,15 +26,22 @@ files:
  - README.md
  - Rakefile
  - lib/llm_gateway.rb
+ - lib/llm_gateway/adapters/claude/bidirectional_message_mapper.rb
  - lib/llm_gateway/adapters/claude/client.rb
  - lib/llm_gateway/adapters/claude/input_mapper.rb
  - lib/llm_gateway/adapters/claude/output_mapper.rb
+ - lib/llm_gateway/adapters/groq/bidirectional_message_mapper.rb
  - lib/llm_gateway/adapters/groq/client.rb
  - lib/llm_gateway/adapters/groq/input_mapper.rb
  - lib/llm_gateway/adapters/groq/output_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/chat_completions/bidirectional_message_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/chat_completions/input_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/chat_completions/output_mapper.rb
  - lib/llm_gateway/adapters/open_ai/client.rb
- - lib/llm_gateway/adapters/open_ai/input_mapper.rb
- - lib/llm_gateway/adapters/open_ai/output_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/file_output_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/responses/bidirectional_message_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/responses/input_mapper.rb
+ - lib/llm_gateway/adapters/open_ai/responses/output_mapper.rb
  - lib/llm_gateway/base_client.rb
  - lib/llm_gateway/client.rb
  - lib/llm_gateway/errors.rb