intelligence 0.5.0 → 0.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +555 -0
- data/intelligence.gemspec +1 -1
- data/lib/intelligence/adapter/base.rb +23 -3
- data/lib/intelligence/adapter/class_methods.rb +15 -0
- data/lib/intelligence/adapter/{construction_methods.rb → module_methods.rb} +8 -4
- data/lib/intelligence/adapter.rb +2 -2
- data/lib/intelligence/adapters/anthropic/adapter.rb +21 -30
- data/lib/intelligence/adapters/anthropic/chat_request_methods.rb +189 -0
- data/lib/intelligence/adapters/anthropic/{chat_methods.rb → chat_response_methods.rb} +8 -124
- data/lib/intelligence/adapters/cerebras.rb +17 -17
- data/lib/intelligence/adapters/generic/adapter.rb +1 -12
- data/lib/intelligence/adapters/generic/chat_methods.rb +42 -11
- data/lib/intelligence/adapters/generic.rb +1 -1
- data/lib/intelligence/adapters/google/adapter.rb +33 -35
- data/lib/intelligence/adapters/google/chat_request_methods.rb +233 -0
- data/lib/intelligence/adapters/google/{chat_methods.rb → chat_response_methods.rb} +52 -162
- data/lib/intelligence/adapters/groq.rb +46 -28
- data/lib/intelligence/adapters/hyperbolic.rb +13 -13
- data/lib/intelligence/adapters/legacy/adapter.rb +0 -2
- data/lib/intelligence/adapters/legacy/chat_methods.rb +22 -6
- data/lib/intelligence/adapters/mistral.rb +57 -0
- data/lib/intelligence/adapters/open_ai/adapter.rb +38 -45
- data/lib/intelligence/adapters/open_ai/chat_request_methods.rb +186 -0
- data/lib/intelligence/adapters/open_ai/{chat_methods.rb → chat_response_methods.rb} +60 -131
- data/lib/intelligence/adapters/open_ai.rb +1 -1
- data/lib/intelligence/adapters/open_router.rb +62 -0
- data/lib/intelligence/adapters/samba_nova.rb +13 -13
- data/lib/intelligence/adapters/together_ai.rb +21 -19
- data/lib/intelligence/chat_request.rb +57 -7
- data/lib/intelligence/chat_result.rb +4 -0
- data/lib/intelligence/chat_result_choice.rb +4 -2
- data/lib/intelligence/conversation.rb +38 -9
- data/lib/intelligence/message.rb +103 -20
- data/lib/intelligence/message_content/base.rb +3 -0
- data/lib/intelligence/message_content/binary.rb +6 -0
- data/lib/intelligence/message_content/file.rb +35 -0
- data/lib/intelligence/message_content/text.rb +5 -0
- data/lib/intelligence/message_content/tool_call.rb +12 -1
- data/lib/intelligence/message_content/tool_result.rb +15 -3
- data/lib/intelligence/message_content.rb +12 -3
- data/lib/intelligence/tool.rb +139 -0
- data/lib/intelligence/version.rb +1 -1
- data/lib/intelligence.rb +6 -4
- metadata +18 -9
data/lib/intelligence/adapters/google/adapter.rb
@@ -1,57 +1,55 @@
-require_relative '
+require_relative 'chat_request_methods'
+require_relative 'chat_response_methods'
 
 module Intelligence
   module Google
     class Adapter < Adapter::Base
 
-
+      schema do
 
        # normalized properties for all endpoints
-
+        key String
 
-
+        chat_options as: :generationConfig do
 
          # normalized properties for google generative text endpoint
-
-
-
-
-
-
-
-
-
-
-
-
+          model String
+          max_tokens Integer, as: :maxOutputTokens
+          n Integer, as: :candidateCount
+          temperature Float
+          top_k Integer, as: :topK
+          top_p Float, as: :topP
+          seed Integer
+          stop String, array: true, as: :stopSequences
+          stream [ TrueClass, FalseClass ]
+
+          frequency_penalty Float, as: :frequencyPenalty
+          presence_penalty Float, as: :presencePenalty
 
          # google variant of normalized properties for google generative text endpoints
-
-
-
+          candidate_count Integer, as: :candidateCount
+          max_output_tokens Integer, as: :maxOutputTokens
+          stop_sequences String, array: true, as: :stopSequences
 
          # google specific properties for google generative text endpoints
-
-
+          response_mime_type String, as: :responseMimeType
+          response_schema as: :responseSchema
 
-
+          # google specific tool configuration
+          tool_configuration as: :tool_config do
+            function_calling as: :function_calling_config do
+              mode Symbol, in: [ :auto, :any, :none ]
+              allowed_function_names String, array: true
+            end
+          end
 
-      end
 
-
-
-      attr_reader :stream
-      attr_reader :chat_options
-
-      def initialize( attributes = nil, &block )
-        configuration = self.class.configure( attributes, &block ).to_h
-        @key = configuration.delete( :key )
-        @model = configuration[ :generationConfig ]&.delete( :model )
-        @stream = configuration[ :generationConfig ]&.delete( :stream ) || false
-        @chat_options = configuration[ :generationConfig ] || {}
+        end
+
       end
 
-      include
+      include ChatRequestMethods
+      include ChatResponseMethods
 
     end
 
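The `as:` options in the schema above rename normalized chat options to the keys Google's `generationConfig` object expects. A minimal standalone sketch of that renaming, using only the mappings visible in the schema (illustrative only, not code from the gem):

# Sketch: map normalized option names onto Google generationConfig keys.
GOOGLE_KEY_MAP = {
  max_tokens:        :maxOutputTokens,
  n:                 :candidateCount,
  top_k:             :topK,
  top_p:             :topP,
  stop:              :stopSequences,
  frequency_penalty: :frequencyPenalty,
  presence_penalty:  :presencePenalty
}

def to_generation_config( options )
  options.to_h { | key, value | [ GOOGLE_KEY_MAP.fetch( key, key ), value ] }
end

to_generation_config( max_tokens: 1024, top_p: 0.9, temperature: 0.7 )
# => { maxOutputTokens: 1024, topP: 0.9, temperature: 0.7 }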
data/lib/intelligence/adapters/google/chat_request_methods.rb (new file)
@@ -0,0 +1,233 @@
+require 'uri'
+
+module Intelligence
+  module Google
+    module ChatRequestMethods
+
+      GENERATIVE_LANGUAGE_URI = "https://generativelanguage.googleapis.com/v1beta/models/"
+
+      SUPPORTED_BINARY_MEDIA_TYPES = %w[ text ]
+
+      SUPPORTED_BINARY_CONTENT_TYPES = %w[
+        image/png image/jpeg image/webp image/heic image/heif
+        audio/aac audio/flac audio/mp3 audio/m4a audio/mpeg audio/mpga audio/mp4 audio/opus
+        audio/pcm audio/wav audio/webm
+        application/pdf
+      ]
+
+      SUPPORTED_FILE_MEDIA_TYPES = %w[ text ]
+
+      SUPPORTED_CONTENT_TYPES = %w[
+        image/png image/jpeg image/webp image/heic image/heif
+        video/x-flv video/quicktime video/mpeg video/mpegps video/mpg video/mp4 video/webm
+        video/wmv video/3gpp
+        audio/aac audio/flac audio/mp3 audio/m4a audio/mpeg audio/mpga audio/mp4 audio/opus
+        audio/pcm audio/wav audio/webm
+        application/pdf
+      ]
+
+      def chat_request_uri( options )
+        options = @options.merge( build_options( options ) )
+
+        key = options[ :key ]
+        gc = options[ :generationConfig ] || {}
+        model = gc[ :model ]
+        stream = gc.key?( :stream ) ? gc[ :stream ] : false
+
+        raise ArgumentError.new( "A Google API key is required to build a Google chat request." ) \
+          if key.nil?
+        raise ArgumentError.new( "A Google model is required to build a Google chat request." ) \
+          if model.nil?
+
+        uri = URI( GENERATIVE_LANGUAGE_URI )
+        path = File.join( uri.path, model )
+        path += stream ? ':streamGenerateContent' : ':generateContent'
+        uri.path = path
+        query = { key: key }
+        query[ :alt ] = 'sse' if stream
+        uri.query = URI.encode_www_form( query )
+
+        uri.to_s
+      end
+
+      def chat_request_headers( options = {} )
+        { 'Content-Type' => 'application/json' }
+      end
+
+      def chat_request_body( conversation, options = {} )
+        options = @options.merge( build_options( options ) )
+
+        gc = options[ :generationConfig ]
+        # discard properties not part of the google generationConfig schema
+        gc.delete( :model )
+        gc.delete( :stream )
+
+        # googlify tool configuration
+        if tool_config = gc.delete( :tool_config )
+          mode = tool_config[ :function_calling_config ]&.[]( :mode )
+          tool_config[ :function_calling_config ][ :mode ] = mode.to_s.upcase if mode
+        end
+
+        result = {}
+        result[ :generationConfig ] = gc
+        result[ :tool_config ] = tool_config if tool_config
+
+        # construct the system prompt in the form of the google schema
+        system_instructions = to_google_system_message( conversation[ :system_message ] )
+        result[ :systemInstruction ] = system_instructions if system_instructions
+
+        result[ :contents ] = []
+        conversation[ :messages ]&.each do | message |
+
+          result_message = { role: message[ :role ] == :user ? 'user' : 'model' }
+          result_message_parts = []
+
+          message[ :contents ]&.each do | content |
+            case content[ :type ]
+            when :text
+              result_message_parts << { text: content[ :text ] }
+            when :binary
+              content_type = content[ :content_type ]
+              bytes = content[ :bytes ]
+              if content_type && bytes
+                mime_type = MIME::Types[ content_type ].first
+                if SUPPORTED_BINARY_MEDIA_TYPES.include?( mime_type&.media_type ) ||
+                   SUPPORTED_BINARY_CONTENT_TYPES.include?( content_type )
+                  result_message_parts << {
+                    inline_data: {
+                      mime_type: content_type,
+                      data: Base64.strict_encode64( bytes )
+                    }
+                  }
+                else
+                  raise UnsupportedContentError.new(
+                    :google,
+                    "does not support #{content_type} content type"
+                  )
+                end
+              else
+                raise UnsupportedContentError.new(
+                  :google,
+                  'requires binary content to include content type and ( packed ) bytes'
+                )
+              end
+            when :file
+              content_type = content[ :content_type ]
+              uri = content[ :uri ]
+              if content_type && uri
+                mime_type = MIME::Types[ content_type ].first
+                if SUPPORTED_FILE_MEDIA_TYPES.include?( mime_type&.media_type ) ||
+                   SUPPORTED_FILE_CONTENT_TYPES.include?( content_type )
+                  result_message_parts << {
+                    file_data: {
+                      mime_type: content_type,
+                      file_uri: uri
+                    }
+                  }
+                else
+                  raise UnsupportedContentError.new(
+                    :google,
+                    "does not support #{content_type} content type"
+                  )
+                end
+              else
+                raise UnsupportedContentError.new(
+                  :google,
+                  'requires file content to include content type and uri'
+                )
+              end
+            when :tool_call
+              result_message_parts << {
+                functionCall: {
+                  name: content[ :tool_name ],
+                  args: content[ :tool_parameters ]
+                }
+              }
+            when :tool_result
+              result_message_parts << {
+                functionResponse: {
+                  name: content[ :tool_name ],
+                  response: {
+                    name: content[ :tool_name ],
+                    content: content[ :tool_result ]
+                  }
+                }
+              }
+            else
+              raise InvalidContentError.new( :google )
+            end
+          end
+
+          result_message[ :parts ] = result_message_parts
+          result[ :contents ] << result_message
+
+        end
+
+        tools_attributes = to_google_tools( conversation[ :tools ] )
+        result[ :tools ] = tools_attributes if tools_attributes && tools_attributes.length > 0
+
+        JSON.generate( result )
+      end
+
+      private
+
+      def to_google_system_message( system_message )
+        return nil if system_message.nil?
+
+        text = ''
+        system_message[ :contents ].each do | content |
+          text += content[ :text ] if content[ :type ] == :text
+        end
+
+        return nil if text.empty?
+
+        {
+          role: 'user',
+          parts: [
+            { text: text }
+          ]
+        }
+      end
+
+      def to_google_tools( tools )
+        properties_array_to_object = lambda do | properties |
+          return nil unless properties&.any?
+          object = {}
+          required = []
+          properties.each do | property |
+            name = property.delete( :name )
+            required << name if property.delete( :required )
+            if property[ :properties ]&.any?
+              property_properties, property_required =
+                properties_array_to_object.call( property[ :properties ] )
+              property[ :properties ] = property_properties
+              property[ :required ] = property_required if property_required.any?
+            end
+            object[ name ] = property
+          end
+          [ object, required.compact ]
+        end
+
+        return [ { function_declarations: tools&.map { | tool |
+          function = {
+            name: tool[ :name ],
+            description: tool[ :description ],
+          }
+          if tool[ :properties ]&.any?
+            properties_object, properties_required =
+              properties_array_to_object.call( tool[ :properties ] )
+            function[ :parameters ] = {
+              type: 'object',
+              properties: properties_object
+            }
+            function[ :parameters ][ :required ] = properties_required if properties_required.any?
+          end
+          function
+        } } ]
+      end
+
+    end
+
+  end
+
+end
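For orientation, `chat_request_uri` above joins the model onto the generative language base URI, appends `:streamGenerateContent` (plus `alt=sse`) when streaming or `:generateContent` otherwise, and passes the API key as a query parameter. A small sketch of the resulting URI, with a placeholder model name and key (both are assumptions for illustration, not values from the gem):

require 'uri'

# Mirrors the URI construction above with placeholder model and key values.
base = URI( "https://generativelanguage.googleapis.com/v1beta/models/" )
base.path  = File.join( base.path, "gemini-1.5-flash" ) + ':streamGenerateContent'
base.query = URI.encode_www_form( key: 'GOOGLE_API_KEY', alt: 'sse' )

puts base.to_s
# => https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:streamGenerateContent?key=GOOGLE_API_KEY&alt=sse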
data/lib/intelligence/adapters/google/{chat_methods.rb → chat_response_methods.rb}
@@ -2,110 +2,14 @@ require 'uri'
 
 module Intelligence
   module Google
-    module
+    module ChatResponseMethods
 
-
-
-      def chat_request_uri( options )
-
-        options = options.nil? || options.empty? ? {} : self.class.configure( options )
-
-        key = options[ :key ] || self.key
-
-        gc = options[ :generationConfig ] || {}
-        model = gc[ :model ] || self.model
-        stream = gc.key?( :stream ) ? gc[ :stream ] : self.stream
-
-        raise ArgumentError.new( "A Google API key is required to build a Google chat request." ) \
-          if self.key.nil?
-        raise ArgumentError.new( "A Google model is required to build a Google chat request." ) \
-          if model.nil?
-
-        uri = URI( GENERATIVE_LANGUAGE_URI )
-        path = File.join( uri.path, model )
-        path += stream ? ':streamGenerateContent' : ':generateContent'
-        uri.path = path
-        query = { key: self.key }
-        query[ :alt ] = 'sse' if stream
-        uri.query = URI.encode_www_form( query )
-
-        uri.to_s
-
-      end
-
-      def chat_request_headers( options = {} )
-        { 'Content-Type' => 'application/json' }
-      end
-
-      def chat_request_body( conversation, options = {} )
-
-        result = {}
-        result[ :generationConfig ] = self.chat_options
-
-        options = options.nil? || options.empty? ? {} : self.class.configure( options )
-        result = result.merge( options )
-
-        # discard properties not part of the google endpoint schema
-        result[ :generationConfig ].delete( :model )
-        result[ :generationConfig ].delete( :stream )
-
-        # construct the system prompt in the form of the google schema
-        system_instructions = translate_system_message( conversation[ :system_message ] )
-        result[ :systemInstruction ] = system_instructions if system_instructions
-
-        result[ :contents ] = []
-        conversation[ :messages ]&.each do | message |
-
-          result_message = { role: message[ :role ] == :user ? 'user' : 'model' }
-          result_message_parts = []
-
-          message[ :contents ]&.each do | content |
-            case content[ :type ]
-            when :text
-              result_message_parts << { text: content[ :text ] }
-            when :binary
-              content_type = content[ :content_type ]
-              bytes = content[ :bytes ]
-              if content_type && bytes
-                unless MIME::Types[ content_type ].empty?
-                  # TODO: verify the specific google supported MIME types
-                  result_message_parts << {
-                    inline_data: {
-                      mime_type: content_type,
-                      data: Base64.strict_encode64( bytes )
-                    }
-                  }
-                else
-                  raise UnsupportedContentError.new(
-                    :google,
-                    'only support recognized mime types'
-                  )
-                end
-              else
-                raise UnsupportedContentError.new(
-                  :google,
-                  'requires binary content to include content type and ( packed ) bytes'
-                )
-              end
-            end
-          end
-
-          result_message[ :parts ] = result_message_parts
-          result[ :contents ] << result_message
-
-        end
-
-        JSON.generate( result )
-
-      end
-
-      def chat_result_attributes( response )
+      def chat_result_attributes( response )
 
         return nil unless response.success?
 
        response_json = JSON.parse( response.body, symbolize_names: true ) rescue nil
-        return nil
-          if response_json.nil? || response_json[ :candidates ].nil?
+        return nil if response_json.nil? || response_json[ :candidates ].nil?
 
        result = {}
        result[ :choices ] = []
@@ -120,13 +24,21 @@ module Intelligence
          response_content = response_choice[ :content ]
          if response_content
            role = ( response_content[ :role ] == 'model' ) ? 'assistant' : 'user'
-
            contents = []
            response_content[ :parts ]&.each do | response_content_part |
              if response_content_part.key?( :text )
                contents.push( {
                  type: 'text', text: response_content_part[ :text ]
                } )
+              elsif function_call = response_content_part[ :functionCall ]
+                contents.push( {
+                  type: :tool_call,
+                  tool_name: function_call[ :name ],
+                  tool_parameters: function_call[ :args ]
+                } )
+                # google does not indicate there is tool call in the stop reason so
+                # we will synthesize this end reason
+                end_reason = :tool_called if end_reason == :ended
              end
            end
          end
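The new branch above normalizes Gemini `functionCall` parts into the gem's tool-call content and, because Google reports `STOP` rather than a distinct tool-call finish reason, rewrites an `:ended` end reason to `:tool_called`. Schematically, with made-up values for illustration:

# A Google candidate part carrying a function call (illustrative values only):
part = { functionCall: { name: 'get_weather', args: { 'city' => 'Lisbon' } } }

# ...is pushed onto the choice contents as:
{
  type:            :tool_call,
  tool_name:       part[ :functionCall ][ :name ],   # => "get_weather"
  tool_parameters: part[ :functionCall ][ :args ]    # => { "city" => "Lisbon" }
}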
@@ -178,7 +90,6 @@ module Intelligence
      end
 
      def stream_result_chunk_attributes( context, chunk )
-        #---------------------------------------------------
 
        context ||= {}
        buffer = context[ :buffer ] || ''
@@ -257,7 +168,6 @@ module Intelligence
      end
 
      def stream_result_attributes( context )
-        #--------------------------------------
 
        choices = context[ :choices ]
        metrics = context[ :metrics ]
@@ -272,71 +182,51 @@ module Intelligence
 
      alias_method :stream_result_error_attributes, :chat_result_error_attributes
 
-
-
-
-        return nil if system_message.nil?
-
-        text = ''
-        system_message[ :contents ].each do | content |
-          text += content[ :text ] if content[ :type ] == :text
-        end
-
-        return nil if text.empty?
-
-        {
-          role: 'user',
-          parts: [
-            { text: text }
-          ]
-        }
-
-      end
-
-      private; def translate_finish_reason( finish_reason )
-      # ---------------------------------------------------
+      private
+
+      def translate_finish_reason( finish_reason )
        case finish_reason
-
-
-
-
-
-
-
-
+        when 'STOP'
+          :ended
+        when 'MAX_TOKENS'
+          :token_limit_exceeded
+        when 'SAFETY', 'RECITATION', 'BLOCKLIST', 'PROHIBITED_CONTENT', 'SPII'
+          :filtered
+        else
+          nil
        end
      end
 
-
+      def translate_error_response_status( status )
        case status
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        when 400
+          [ :invalid_request_error,
+            "There was an issue with the format or content of your request." ]
+        when 403
+          [ :permission_error,
+            "Your API key does not have permission to use the specified resource." ]
+        when 404
+          [ :not_found_error,
+            "The requested resource was not found." ]
+        when 413
+          [ :request_too_large,
+            "Request exceeds the maximum allowed number of bytes." ]
+        when 422
+          [ :invalid_request_error,
+            "There was an issue with the format or content of your request." ]
+        when 429
+          [ :rate_limit_error,
+            "Your account has hit a rate limit." ]
+        when 500, 502, 503
+          [ :api_error,
+            "An unexpected error has occurred internal to the providers systems." ]
+        when 529
+          [ :overloaded_error,
+            "The providers server is temporarily overloaded." ]
+        else
+          [ :unknown_error, "
+            An unknown error occurred." ]
+        end
      end
 
    end
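The added `translate_finish_reason` and `translate_error_response_status` are straight lookups. The finish-reason mapping restated as a table for quick reference (a sketch equivalent to the case statement above, not gem code):

GOOGLE_FINISH_REASONS = {
  'STOP'               => :ended,
  'MAX_TOKENS'         => :token_limit_exceeded,
  'SAFETY'             => :filtered,
  'RECITATION'         => :filtered,
  'BLOCKLIST'          => :filtered,
  'PROHIBITED_CONTENT' => :filtered,
  'SPII'               => :filtered
}

GOOGLE_FINISH_REASONS[ 'MAX_TOKENS' ]  # => :token_limit_exceeded
GOOGLE_FINISH_REASONS[ 'OTHER' ]       # => nil, same as the case statement's else branch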
data/lib/intelligence/adapters/groq.rb
@@ -7,41 +7,59 @@ module Intelligence
 
      chat_request_uri 'https://api.groq.com/openai/v1/chat/completions'
 
-
-
-
-
-
-
-
-
-
-
-
-
-      group :response_format do
+      schema do
+        key String
+        chat_options do
+          frequency_penalty Float
+          logit_bias
+          logprobs [ TrueClass, FalseClass ]
+          max_tokens Integer
+          model String
+          # the parallel_tool_calls is only allowed when 'tools' are specified
+          parallel_tool_calls [ TrueClass, FalseClass ]
+          presence_penalty Float
+          response_format do
            # 'text' and 'json_object' are the only supported types; you must also instruct
            # the model to output json
-
+            type Symbol, in: [ :text, :json_object ]
          end
-
-
-
-
-
+          seed Integer
+          stop String, array: true
+          stream [ TrueClass, FalseClass ]
+          stream_options do
+            include_usage [ TrueClass, FalseClass ]
          end
-
-
+          temperature Float
+          tool_choice do
            # one of 'auto', 'none' or 'function'
-
-      # the function
-
-
+            type Symbol, in: [ :auto, :none, :function ]
+            # the function parameters is required if you specify a type of 'function'
+            function do
+              name String
            end
          end
-
-
-
+          top_logprobs Integer
+          top_p Float
+          user String
+        end
+      end
+
+      alias chat_request_generic_message_attributes chat_request_message_attributes
+
+      # groq models only support the legacy Open AI message schema for the assistant
+      # messages while supporting the modern message schema for user messages
+      def chat_request_message_attributes( message )
+        role = message[ :role ]&.to_sym
+        case role
+        when :user
+          chat_request_generic_message_attributes( message )
+        when :assistant
+          chat_request_legacy_message_attributes( message )
+        else
+          raise UnsupportedContentError.new(
+            :mistral,
+            'only supports user and assistant message roles'
+          )
        end
      end
 
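The Groq adapter's new `chat_request_message_attributes` dispatches on role: user messages go through the generic serializer, assistant messages through the legacy one. A hedged sketch of the difference in the resulting message shapes, assumed from the OpenAI-style schemas the comment refers to (not taken verbatim from the gem):

# Modern content-parts shape, used for user messages:
user_message = {
  role:    'user',
  content: [ { type: 'text', text: 'Hello!' } ]
}

# Legacy shape, used for assistant messages on Groq: content is a plain string.
assistant_message = {
  role:    'assistant',
  content: 'Hi! How can I help?'
}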