intelligence 0.6.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +576 -0
- data/intelligence.gemspec +2 -1
- data/lib/intelligence/adapter/base.rb +13 -6
- data/lib/intelligence/adapter/class_methods.rb +15 -0
- data/lib/intelligence/adapter/module_methods.rb +41 -0
- data/lib/intelligence/adapter.rb +2 -2
- data/lib/intelligence/adapters/anthropic/adapter.rb +21 -19
- data/lib/intelligence/adapters/anthropic/chat_request_methods.rb +189 -0
- data/lib/intelligence/adapters/anthropic/{chat_methods.rb → chat_response_methods.rb} +13 -137
- data/lib/intelligence/adapters/cerebras.rb +19 -19
- data/lib/intelligence/adapters/generic/adapter.rb +4 -2
- data/lib/intelligence/adapters/generic/chat_request_methods.rb +221 -0
- data/lib/intelligence/adapters/generic/chat_response_methods.rb +234 -0
- data/lib/intelligence/adapters/generic.rb +1 -1
- data/lib/intelligence/adapters/google/adapter.rb +33 -22
- data/lib/intelligence/adapters/google/chat_request_methods.rb +234 -0
- data/lib/intelligence/adapters/google/chat_response_methods.rb +236 -0
- data/lib/intelligence/adapters/groq.rb +29 -49
- data/lib/intelligence/adapters/hyperbolic.rb +13 -39
- data/lib/intelligence/adapters/mistral.rb +21 -42
- data/lib/intelligence/adapters/open_ai/adapter.rb +39 -32
- data/lib/intelligence/adapters/open_ai/chat_request_methods.rb +186 -0
- data/lib/intelligence/adapters/open_ai/chat_response_methods.rb +239 -0
- data/lib/intelligence/adapters/open_ai.rb +1 -1
- data/lib/intelligence/adapters/open_router.rb +18 -18
- data/lib/intelligence/adapters/samba_nova.rb +16 -18
- data/lib/intelligence/adapters/together_ai.rb +25 -23
- data/lib/intelligence/conversation.rb +11 -10
- data/lib/intelligence/message.rb +45 -29
- data/lib/intelligence/message_content/base.rb +2 -9
- data/lib/intelligence/message_content/binary.rb +3 -3
- data/lib/intelligence/message_content/file.rb +3 -3
- data/lib/intelligence/message_content/text.rb +10 -2
- data/lib/intelligence/message_content/tool_call.rb +61 -5
- data/lib/intelligence/message_content/tool_result.rb +11 -6
- data/lib/intelligence/tool.rb +139 -0
- data/lib/intelligence/version.rb +1 -1
- data/lib/intelligence.rb +3 -1
- metadata +31 -13
- data/lib/intelligence/adapter/class_methods/construction.rb +0 -17
- data/lib/intelligence/adapter/module_methods/construction.rb +0 -43
- data/lib/intelligence/adapters/generic/chat_methods.rb +0 -355
- data/lib/intelligence/adapters/google/chat_methods.rb +0 -393
- data/lib/intelligence/adapters/legacy/adapter.rb +0 -11
- data/lib/intelligence/adapters/legacy/chat_methods.rb +0 -54
- data/lib/intelligence/adapters/open_ai/chat_methods.rb +0 -345
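The headline change in this release is structural: the legacy adapter is removed, each provider's single chat_methods.rb is split into chat_request_methods.rb and chat_response_methods.rb, and adapter options are now declared through a schema DSL (see the adapter/class_methods.rb and adapter/module_methods.rb additions). A minimal sketch of the resulting adapter shape, mirroring the OpenAI hunk further down; the provider name is hypothetical and the sketch is not copied from the gem:

```ruby
require_relative 'chat_request_methods'
require_relative 'chat_response_methods'

module Intelligence
  module SomeProvider                # hypothetical provider namespace
    class Adapter < Adapter::Base
      schema do
        # declarative request options go here ( key, chat_options, ... )
      end

      include ChatRequestMethods     # chat_request_uri / headers / body
      include ChatResponseMethods    # result parsing, streaming, error mapping
    end
  end
end
```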
data/lib/intelligence/adapters/mistral.rb:

@@ -1,57 +1,36 @@
-require_relative '
+require_relative 'generic/adapter'
 
 module Intelligence
   module Mistral
-
-    class Adapter < Legacy::Adapter
+    class Adapter < Generic::Adapter
 
       chat_request_uri "https://api.mistral.ai/v1/chat/completions"
 
-
-
-
-
-
-
-
-
-
-
-
+      schema do
+        key String
+        chat_options do
+          model String
+          temperature Float
+          top_p Float
+          max_tokens Integer
+          min_tokens Integer
+          seed Integer, as: :random_seed
+          stop String, array: true
+          stream [ TrueClass, FalseClass ]
 
-
-
-
+          random_seed Integer
+          response_format do
+            type String
           end
-
-
-
-
+          tool_choice do
+            type String
+            function do
+              name String
             end
           end
         end
       end
-
-      alias chat_request_generic_message_attributes chat_request_message_attributes
-
-      # mistral vision models only support the legacy Open AI message schema for the assistant
-      # messages while supporting the modern message schema for user messages :facepalm:
-      def chat_request_message_attributes( message )
-        role = message[ :role ]&.to_sym
-        case role
-        when :user
-          chat_request_generic_message_attributes( message )
-        when :assistant
-          chat_request_legacy_message_attributes( message )
-        else
-          raise UnsupportedContentError.new(
-            :mistral,
-            'only supports user and assistant message roles'
-          )
-        end
-      end
-
+
     end
-
   end
 end
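The schema above is the full option surface of the Mistral adapter; the normalized `seed` is forwarded as Mistral's `random_seed`, which can also be set directly. A configuration sketch, assuming a block-style builder such as `Intelligence::Adapter.build` (the builder entry point and model id are assumptions, not shown in this diff):

```ruby
require 'intelligence'

# Assumed builder entry point; the option names come from the schema above.
adapter = Intelligence::Adapter.build( :mistral ) do
  key ENV[ 'MISTRAL_API_KEY' ]
  chat_options do
    model 'mistral-small-latest'     # assumed model id
    temperature 0.2
    max_tokens 256
    seed 42                          # sent to the API as random_seed
  end
end
```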
data/lib/intelligence/adapters/open_ai/adapter.rb:

@@ -1,60 +1,67 @@
-require_relative '
+require_relative 'chat_request_methods'
+require_relative 'chat_response_methods'
 
 module Intelligence
   module OpenAi
     class Adapter < Adapter::Base
 
-
+      schema do
 
         # normalized properties for all endpoints
-
+        key String
 
        # openai properties for all endpoints
-
-
+        organization String
+        project String
 
        # properties for generative text endpoints
-
+        chat_options do
 
          # normalized properties for openai generative text endpoint
-
-
-
-
-
-
-
-
-
-
-
+          model String, requried: true
+          n Integer
+          max_tokens Integer, as: :max_completion_tokens
+          temperature Float
+          top_p Float
+          seed Integer
+          stop String, array: true
+          stream [ TrueClass, FalseClass ]
+
+          frequency_penalty Float
+          presence_penalty Float
 
          # openai variant of normalized properties for openai generative text endpoints
-
+          max_completion_tokens Integer
 
          # openai properties for openai generative text endpoint
-
-
-
-
+          audio do
+            voice String
+            format String
+          end
+          logit_bias
+          logprobs [ TrueClass, FalseClass ]
+          modalities String, array: true
+          # the parallel_tool_calls parameter is only allowed when 'tools' are specified
+          parallel_tool_calls [ TrueClass, FalseClass ]
+          response_format do
            # 'text' and 'json_schema' are the only supported types
-
-
+            type Symbol, in: [ :text, :json_schema ]
+            json_schema
          end
-
-
-
+          service_tier String
+          stream_options do
+            include_usage [ TrueClass, FalseClass ]
          end
-
-
-
-      parameter :user
+          tool_choice
+          top_logprobs Integer
+          user
 
        end
 
      end
 
-      include
+      include ChatRequestMethods
+      include ChatResponseMethods
 
    end
  end
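The schema above keeps two spellings for the completion limit: the normalized `max_tokens`, declared with `as: :max_completion_tokens`, and OpenAI's native `max_completion_tokens`. A hedged sketch of what that mapping implies for the request body (the hash below stands in for a built set of chat options; it is not the gem's internal representation):

```ruby
# A caller-facing `max_tokens 512` is expected to land under OpenAI's native
# key once the schema's `as:` mapping has been applied:
chat_options = { model: 'gpt-4o-mini', max_completion_tokens: 512 }   # model id assumed

# chat_request_body (next file) merges these options directly into the request,
# so the generated JSON would carry:
#   { "model": "gpt-4o-mini", "max_completion_tokens": 512, "messages": [ ... ] }
```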
data/lib/intelligence/adapters/open_ai/chat_request_methods.rb:

@@ -0,0 +1,186 @@
+module Intelligence
+  module OpenAi
+    module ChatRequestMethods
+
+      CHAT_REQUEST_URI = "https://api.openai.com/v1/chat/completions"
+
+      SUPPORTED_CONTENT_TYPES = [ 'image/jpeg', 'image/png' ]
+
+      def chat_request_uri( options )
+        CHAT_REQUEST_URI
+      end
+
+      def chat_request_headers( options = {} )
+        options = @options.merge( build_options( options ) )
+        result = {}
+
+        key = options[ :key ]
+        organization = options[ :organization ]
+        project = options[ :project ]
+
+        raise ArgumentError.new( "An OpenAI key is required to build an OpenAI chat request." ) \
+          if key.nil?
+
+        result[ 'Content-Type' ] = 'application/json'
+        result[ 'Authorization' ] = "Bearer #{key}"
+        result[ 'OpenAI-Organization' ] = organization unless organization.nil?
+        result[ 'OpenAI-Project' ] = project unless project.nil?
+
+        result
+      end
+
+      def chat_request_body( conversation, options = {} )
+        options = @options.merge( build_options( options ) )
+        result = options[ :chat_options ]&.compact || {}
+        result[ :messages ] = []
+
+        system_message = to_open_ai_system_message( conversation[ :system_message ] )
+        result[ :messages ] << { role: 'system', content: system_message } if system_message
+
+        conversation[ :messages ]&.each do | message |
+
+          result_message = { role: message[ :role ] }
+          result_message_content = []
+
+          message[ :contents ]&.each do | content |
+            case content[ :type ]
+            when :text
+              result_message_content << { type: 'text', text: content[ :text ] }
+            when :binary
+              content_type = content[ :content_type ]
+              bytes = content[ :bytes ]
+              if content_type && bytes
+                if SUPPORTED_CONTENT_TYPES.include?( content_type )
+                  result_message_content << {
+                    type: 'image_url',
+                    image_url: {
+                      url: "data:#{content_type};base64,#{Base64.strict_encode64( bytes )}".freeze
+                    }
+                  }
+                else
+                  raise UnsupportedContentError.new(
+                    :open_ai,
+                    "only supports content of type #{SUPPORTED_CONTENT_TYPES.join( ', ' )}"
+                  )
+                end
+              else
+                raise UnsupportedContentError.new(
+                  :open_ai,
+                  'requires binary content to include content type and ( packed ) bytes'
+                )
+              end
+            when :file
+              content_type = content[ :content_type ]
+              uri = content[ :uri ]
+              if content_type && uri
+                if SUPPORTED_CONTENT_TYPES.include?( content_type )
+                  result_message_content << {
+                    type: 'image_url',
+                    image_url: {
+                      url: uri
+                    }
+                  }
+                else
+                  raise UnsupportedContentError.new(
+                    :open_ai,
+                    "only supports content of type #{SUPPORTED_CONTENT_TYPES.join( ', ' )}"
+                  )
+                end
+              else
+                raise UnsupportedContentError.new(
+                  :open_ai,
+                  'requires file content to include content type and uri'
+                )
+              end
+            when :tool_call
+              tool_calls = result_message[ :tool_calls ] || []
+              function = {
+                name: content[ :tool_name ]
+              }
+              function[ :arguments ] = JSON.generate( content[ :tool_parameters ] || {} )
+              tool_calls << { id: content[ :tool_call_id ], type: 'function', function: function }
+              result_message[ :tool_calls ] = tool_calls
+            when :tool_result
+              # open-ai returns tool results as a message with a role of 'tool'
+              result[ :messages ] << {
+                role: :tool,
+                tool_call_id: content[ :tool_call_id ],
+                content: content[ :tool_result ]
+              }
+            else
+              raise InvalidContentError.new( :open_ai )
+            end
+
+          end
+
+          result_message[ :content ] = result_message_content
+          result[ :messages ] << result_message \
+            if result_message[ :content ]&.any? || result_message[ :tool_calls ]&.any?
+          result
+
+        end
+
+        tools_attributes = chat_request_tools_attributes( conversation[ :tools ] )
+        result[ :tools ] = tools_attributes if tools_attributes && tools_attributes.length > 0
+
+        JSON.generate( result )
+      end
+
+      def chat_request_tools_attributes( tools )
+        properties_array_to_object = lambda do | properties |
+          return nil unless properties&.any?
+          object = {}
+          required = []
+          properties.each do | property |
+            name = property.delete( :name )
+            required << name if property.delete( :required )
+            if property[ :properties ]&.any?
+              property_properties, property_required =
+                properties_array_to_object.call( property[ :properties ] )
+              property[ :properties ] = property_properties
+              property[ :required ] = property_required if property_required.any?
+            end
+            object[ name ] = property
+          end
+          [ object, required.compact ]
+        end
+
+        tools&.map do | tool |
+          function = {
+            type: 'function',
+            function: {
+              name: tool[ :name ],
+              description: tool[ :description ],
+            }
+          }
+
+          if tool[ :properties ]&.any?
+            properties_object, properties_required =
+              properties_array_to_object.call( tool[ :properties ] )
+            function[ :function ][ :parameters ] = {
+              type: 'object',
+              properties: properties_object
+            }
+            function[ :function ][ :parameters ][ :required ] = properties_required \
+              if properties_required.any?
+          end
+          function
+        end
+      end
+
+      private
+
+      def to_open_ai_system_message( system_message )
+        return nil if system_message.nil?
+
+        result = ''
+        system_message[ :contents ].each do | content |
+          result += content[ :text ] if content[ :type ] == :text
+        end
+
+        result.empty? ? nil : result
+      end
+
+    end
+  end
+end
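chat_request_body above takes a normalized conversation hash, a system_message plus messages that each carry a role and typed contents, and serializes it into OpenAI's chat-completions JSON. A sketch of the expected input shape, with the output inferred from the method; values are illustrative:

```ruby
# Input shape inferred from chat_request_body above.
conversation = {
  system_message: {
    contents: [ { type: :text, text: 'You are terse.' } ]
  },
  messages: [
    {
      role: 'user',
      contents: [
        { type: :text, text: 'What is in this image?' },
        { type: :file, content_type: 'image/png', uri: 'https://example.com/cat.png' }
      ]
    }
  ]
}

# Passed to an OpenAI adapter's chat_request_body, the generated body would
# look roughly like:
# {
#   "messages": [
#     { "role": "system", "content": "You are terse." },
#     { "role": "user", "content": [
#       { "type": "text", "text": "What is in this image?" },
#       { "type": "image_url", "image_url": { "url": "https://example.com/cat.png" } }
#     ] }
#   ]
# }
```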
data/lib/intelligence/adapters/open_ai/chat_response_methods.rb:

@@ -0,0 +1,239 @@
+module Intelligence
+  module OpenAi
+    module ChatResponseMethods
+
+      def chat_result_attributes( response )
+        return nil unless response.success?
+        response_json = JSON.parse( response.body, symbolize_names: true ) rescue nil
+        return nil \
+          if response_json.nil? || response_json[ :choices ].nil?
+
+        result = {}
+        result[ :choices ] = []
+
+        ( response_json[ :choices ] || [] ).each do | json_choice |
+          json_message = json_choice[ :message ]
+          result_message = nil
+          if ( json_message )
+            result_message = { role: json_message[ :role ] }
+            if json_message[ :content ]
+              result_message[ :contents ] = [ { type: :text, text: json_message[ :content ] } ]
+            end
+            if json_message[ :tool_calls ] && !json_message[ :tool_calls ].empty?
+              result_message[ :contents ] ||= []
+              json_message[ :tool_calls ].each do | json_message_tool_call |
+                result_message_tool_call_parameters =
+                  JSON.parse( json_message_tool_call[ :function ][ :arguments ], symbolize_names: true ) \
+                    rescue json_message_tool_call[ :function ][ :arguments ]
+                result_message[ :contents ] << {
+                  type: :tool_call,
+                  tool_call_id: json_message_tool_call[ :id ],
+                  tool_name: json_message_tool_call[ :function ][ :name ],
+                  tool_parameters: result_message_tool_call_parameters
+                }
+              end
+            end
+          end
+          result[ :choices ].push( {
+            end_reason: translate_end_result( json_choice[ :finish_reason ] ),
+            message: result_message
+          } )
+        end
+
+        metrics_json = response_json[ :usage ]
+        unless metrics_json.nil?
+
+          metrics = {}
+          metrics[ :input_tokens ] = metrics_json[ :prompt_tokens ]
+          metrics[ :output_tokens ] = metrics_json[ :completion_tokens ]
+          metrics = metrics.compact
+
+          result[ :metrics ] = metrics unless metrics.empty?
+
+        end
+
+        result
+      end
+
+      def chat_result_error_attributes( response )
+        error_type, error_description = translate_error_response_status( response.status )
+        result = {
+          error_type: error_type.to_s,
+          error_description: error_description
+        }
+
+        parsed_body = JSON.parse( response.body, symbolize_names: true ) rescue nil
+        if parsed_body && parsed_body.respond_to?( :include? ) && parsed_body.include?( :error )
+          result = {
+            error_type: error_type.to_s,
+            error: parsed_body[ :error ][ :code ] || error_type.to_s,
+            error_description: parsed_body[ :error ][ :message ] || error_description
+          }
+        end
+
+        result
+      end
+
+      def stream_result_chunk_attributes( context, chunk )
+        context ||= {}
+        buffer = context[ :buffer ] || ''
+        metrics = context[ :metrics ] || {
+          input_tokens: 0,
+          output_tokens: 0
+        }
+        choices = context[ :choices ] || Array.new( 1 , { message: {} } )
+
+        choices.each do | choice |
+          choice[ :message ][ :contents ] = choice[ :message ][ :contents ]&.map do | content |
+            { type: content[ :type ] }
+          end
+        end
+
+        buffer += chunk
+        while ( eol_index = buffer.index( "\n" ) )
+          line = buffer.slice!( 0..eol_index )
+          line = line.strip
+          next if line.empty? || !line.start_with?( 'data:' )
+          line = line[ 6..-1 ]
+
+          next if line.end_with?( '[DONE]' )
+          data = JSON.parse( line ) rescue nil
+
+          if data.is_a?( Hash )
+            data[ 'choices' ]&.each do | data_choice |
+
+              data_choice_index = data_choice[ 'index' ]
+              data_choice_delta = data_choice[ 'delta' ]
+              data_choice_finish_reason = data_choice[ 'finish_reason' ]
+
+              choices.fill( { message: {} }, choices.size, data_choice_index + 1 ) \
+                if choices.size <= data_choice_index
+              contents = choices[ data_choice_index ][ :message ][ :contents ] || []
+
+              text_content = contents.first&.[]( :type ) == :text ? contents.first : nil
+              if data_choice_content = data_choice_delta[ 'content' ]
+                if text_content.nil?
+                  contents.unshift( text_content = { type: :text, text: data_choice_content } )
+                else
+                  text_content[ :text ] = ( text_content[ :text ] || '' ) + data_choice_content
+                end
+              end
+              if data_choice_tool_calls = data_choice_delta[ 'tool_calls' ]
+                data_choice_tool_calls.each do | data_choice_tool_call |
+                  if data_choice_tool_call_function = data_choice_tool_call[ 'function' ]
+                    data_choice_tool_index = data_choice_tool_call[ 'index' ]
+                    data_choice_tool_id = data_choice_tool_call[ 'id' ]
+                    data_choice_tool_name = data_choice_tool_call_function[ 'name' ]
+                    data_choice_tool_parameters = data_choice_tool_call_function[ 'arguments' ]
+
+                    tool_call_content_index = ( text_content.nil? ? 0 : 1 ) + data_choice_tool_index
+                    if tool_call_content_index >= contents.length
+                      contents.push( {
+                        type: :tool_call,
+                        tool_call_id: data_choice_tool_id,
+                        tool_name: data_choice_tool_name,
+                        tool_parameters: data_choice_tool_parameters
+                      } )
+                    else
+                      tool_call = contents[ tool_call_content_index ]
+                      tool_call[ :tool_call_id ] = ( tool_call[ :tool_call_id ] || '' ) + data_choice_tool_id \
+                        if data_choice_tool_id
+                      tool_call[ :tool_name ] = ( tool_call[ :tool_name ] || '' ) + data_choice_tool_name \
+                        if data_choice_tool_name
+                      tool_call[ :tool_parameters ] = ( tool_call[ :tool_parameters ] || '' ) + data_choice_tool_parameters \
+                        if data_choice_tool_parameters
+                    end
+                  end
+                end
+              end
+              choices[ data_choice_index ][ :message ][ :contents ] = contents
+              choices[ data_choice_index ][ :end_reason ] ||=
+                translate_end_result( data_choice_finish_reason )
+            end
+
+            if usage = data[ 'usage' ]
+              metrics[ :input_tokens ] += usage[ 'prompt_tokens' ]
+              metrics[ :output_tokens ] += usage[ 'completion_tokens' ]
+            end
+
+          end
+
+        end
+
+        context[ :buffer ] = buffer
+        context[ :metrics ] = metrics
+        context[ :choices ] = choices
+
+        [ context, choices.empty? ? nil : { choices: choices.dup } ]
+      end
+
+      def stream_result_attributes( context )
+        choices = context[ :choices ]
+        metrics = context[ :metrics ]
+
+        choices = choices.map do | choice |
+          { end_reason: choice[ :end_reason ] }
+        end
+
+        { choices: choices, metrics: context[ :metrics ] }
+      end
+
+      alias_method :stream_result_error_attributes, :chat_result_error_attributes
+
+      private
+
+      def translate_end_result( end_result )
+        case end_result
+        when 'stop'
+          :ended
+        when 'length'
+          :token_limit_exceeded
+        when 'tool_calls'
+          :tool_called
+        when 'content_filter'
+          :filtered
+        else
+          nil
+        end
+      end
+
+      def translate_error_response_status( status )
+        case status
+        when 400
+          [ :invalid_request_error,
+            "There was an issue with the format or content of your request." ]
+        when 401
+          [ :authentication_error,
+            "There's an issue with your API key." ]
+        when 403
+          [ :permission_error,
+            "Your API key does not have permission to use the specified resource." ]
+        when 404
+          [ :not_found_error,
+            "The requested resource was not found." ]
+        when 413
+          [ :request_too_large,
+            "Request exceeds the maximum allowed number of bytes." ]
+        when 422
+          [ :invalid_request_error,
+            "There was an issue with the format or content of your request." ]
+        when 429
+          [ :rate_limit_error,
+            "Your account has hit a rate limit." ]
+        when 500, 502, 503
+          [ :api_error,
+            "An unexpected error has occurred internal to the providers systems." ]
+        when 529
+          [ :overloaded_error,
+            "The providers server is temporarily overloaded." ]
+        else
+          [ :unknown_error, "
+            An unknown error occurred." ]
+        end
+      end
+
+    end
+
+  end
+
+end
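stream_result_chunk_attributes above is an incremental SSE parser: it buffers raw chunk text, consumes complete `data:` lines, folds each delta into per-choice contents, and returns the updated context together with only that chunk's deltas. A small probe, for illustration only, that mixes the module into a bare class so the parser can be exercised with hand-written chunks:

```ruby
require 'intelligence'

# Illustration only: a bare class standing in for a built OpenAI adapter.
class StreamProbe
  include Intelligence::OpenAi::ChatResponseMethods
end

probe   = StreamProbe.new
context = nil

chunks = [
  "data: {\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hel\"}}]}\n",
  "data: {\"choices\":[{\"index\":0,\"delta\":{\"content\":\"lo\"},\"finish_reason\":\"stop\"}]}\n",
  "data: [DONE]\n"
]

chunks.each do | chunk |
  context, partial = probe.stream_result_chunk_attributes( context, chunk )
  # partial[ :choices ][ 0 ][ :message ][ :contents ] holds only this chunk's
  # delta text: "Hel" for the first chunk, "lo" for the second.
end

probe.stream_result_attributes( context )
# => { choices: [ { end_reason: :ended } ], metrics: { input_tokens: 0, output_tokens: 0 } }
```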
data/lib/intelligence/adapters/open_ai.rb:

@@ -1,2 +1,2 @@
 require_relative '../adapter'
-require_relative 'open_ai/adapter'
+require_relative 'open_ai/adapter'

(The two versions of line 2 appear identical here; the difference is likely whitespace- or line-ending-only, which the extracted text cannot show.)
data/lib/intelligence/adapters/open_router.rb:

@@ -7,25 +7,25 @@ module Intelligence
 
       chat_request_uri "https://openrouter.ai/api/v1/chat/completions"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      schema do
+        key String
+        chat_options do
+          model String
+          temperature Float
+          top_k Integer
+          top_p Float
+          max_tokens Integer
+          seed Integer
+          stop String, array: true
+          stream [ TrueClass, FalseClass ]
+          frequency_penalty Float
+          repetition_penalty Float
+          presence_penalty Float
 
-
-
-
-
+          provider do
+            order String, array: true
+            require_parameters [ TrueClass, FalseClass ]
+            allow_fallbacks [ TrueClass, FalseClass ]
           end
        end
      end
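The new provider block above lets an OpenRouter request pin upstream routing preferences. A configuration sketch under the same assumed `Intelligence::Adapter.build` entry point; the builder name, model id, and provider names are assumptions:

```ruby
adapter = Intelligence::Adapter.build( :open_router ) do
  key ENV[ 'OPENROUTER_API_KEY' ]
  chat_options do
    model 'meta-llama/llama-3.1-70b-instruct'    # assumed model id
    max_tokens 512
    provider do
      order [ 'Together', 'Fireworks' ]           # assumed upstream provider names
      allow_fallbacks true
      require_parameters true
    end
  end
end
```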