durable-llm 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.envrc +7 -0
- data/CHANGELOG.md +5 -0
- data/CONFIGURE.md +132 -0
- data/Gemfile +7 -9
- data/Gemfile.lock +3 -3
- data/README.md +1 -0
- data/Rakefile +6 -6
- data/devenv.lock +103 -0
- data/devenv.nix +9 -0
- data/devenv.yaml +15 -0
- data/durable-llm.gemspec +44 -0
- data/examples/openai_quick_complete.rb +3 -1
- data/lib/durable/llm/cli.rb +247 -60
- data/lib/durable/llm/client.rb +92 -11
- data/lib/durable/llm/configuration.rb +174 -23
- data/lib/durable/llm/errors.rb +185 -0
- data/lib/durable/llm/providers/anthropic.rb +246 -36
- data/lib/durable/llm/providers/azure_openai.rb +347 -0
- data/lib/durable/llm/providers/base.rb +106 -9
- data/lib/durable/llm/providers/cohere.rb +227 -0
- data/lib/durable/llm/providers/deepseek.rb +233 -0
- data/lib/durable/llm/providers/fireworks.rb +278 -0
- data/lib/durable/llm/providers/google.rb +301 -0
- data/lib/durable/llm/providers/groq.rb +108 -29
- data/lib/durable/llm/providers/huggingface.rb +122 -18
- data/lib/durable/llm/providers/mistral.rb +431 -0
- data/lib/durable/llm/providers/openai.rb +162 -25
- data/lib/durable/llm/providers/opencode.rb +253 -0
- data/lib/durable/llm/providers/openrouter.rb +256 -0
- data/lib/durable/llm/providers/perplexity.rb +273 -0
- data/lib/durable/llm/providers/together.rb +346 -0
- data/lib/durable/llm/providers/xai.rb +355 -0
- data/lib/durable/llm/providers.rb +103 -15
- data/lib/durable/llm/version.rb +5 -1
- data/lib/durable/llm.rb +143 -3
- data/lib/durable.rb +29 -4
- data/sig/durable/llm.rbs +302 -1
- metadata +50 -36
@@ -0,0 +1,431 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
# Mistral AI provider for language models with completion, embedding, and streaming support.
|
4
|
+
# Handles authentication, HTTP connections, error management, and response normalization.
|
5
|
+
|
6
|
+
require 'faraday'
|
7
|
+
require 'json'
|
8
|
+
require 'event_stream_parser'
|
9
|
+
require 'durable/llm/errors'
|
10
|
+
require 'durable/llm/providers/base'
|
11
|
+
|
12
|
+
module Durable
|
13
|
+
module Llm
|
14
|
+
module Providers
|
15
|
+
# Mistral AI provider for accessing Mistral AI's language models
|
16
|
+
#
|
17
|
+
# This class provides a complete interface to Mistral AI's API, supporting
|
18
|
+
# text completions, embeddings, model listing, and streaming responses.
|
19
|
+
# It handles authentication, HTTP communication, error management, and
|
20
|
+
# response normalization to provide a consistent API experience.
|
21
|
+
#
|
22
|
+
# @example Basic usage
|
23
|
+
# provider = Durable::Llm::Providers::Mistral.new(api_key: 'your_key')
|
24
|
+
# response = provider.completion(model: 'mistral-medium', messages: [{role: 'user', content: 'Hello'}])
|
25
|
+
# puts response.choices.first.to_s
|
26
|
+
#
|
27
|
+
# @example Streaming responses
|
28
|
+
# provider.stream(model: 'mistral-medium', messages: [{role: 'user', content: 'Tell a story'}]) do |chunk|
|
29
|
+
# print chunk.to_s
|
30
|
+
# end
|
31
|
+
class Mistral < Durable::Llm::Providers::Base
|
32
|
+
# Root endpoint for all Mistral AI API requests
BASE_URL = 'https://api.mistral.ai/v1'

# Resolves the API key to use when none is supplied explicitly.
#
# Consults the global configuration first and falls back to the
# MISTRAL_API_KEY environment variable when the configuration object
# does not expose a Mistral section.
#
# @return [String, nil] the resolved API key, or nil if none is configured
def default_api_key
  configured_key = begin
    Durable::Llm.configuration.mistral&.api_key
  rescue NoMethodError
    # Configuration may not define a `mistral` accessor at all.
    nil
  end
  configured_key || ENV['MISTRAL_API_KEY']
end
|
47
|
+
|
48
|
+
# @!attribute [rw] api_key
#   @return [String, nil] The API key used for Mistral AI authentication
attr_accessor :api_key

# Builds a provider instance and prepares the HTTP connection.
#
# @param api_key [String, nil] API key for Mistral AI; when omitted,
#   default_api_key is consulted
def initialize(api_key: nil)
  super()
  @api_key = api_key || default_api_key
  # JSON request/response middleware encodes outgoing bodies and decodes
  # incoming ones, so handlers below deal in Ruby hashes.
  @conn = Faraday.new(url: BASE_URL) do |conn|
    conn.request :json
    conn.response :json
    conn.adapter Faraday.default_adapter
  end
end
|
64
|
+
|
65
|
+
# Sends a chat completion request to Mistral AI.
#
# @param options [Hash] request payload
# @option options [String] :model model identifier (e.g. 'mistral-medium', 'mistral-small')
# @option options [Array<Hash>] :messages message objects with :role and :content
# @option options [Float] :temperature optional sampling temperature (0.0 to 1.0)
# @option options [Integer] :max_tokens optional cap on generated tokens
# @return [MistralResponse] the normalized completion response
# @raise [Durable::Llm::AuthenticationError] when the API key is rejected
# @raise [Durable::Llm::RateLimitError] when the rate limit is exceeded
# @raise [Durable::Llm::InvalidRequestError] when request parameters are invalid
# @raise [Durable::Llm::ServerError] when Mistral AI reports a server fault
def completion(options)
  raw = @conn.post('chat/completions') do |request|
    request.headers['Authorization'] = "Bearer #{@api_key}"
    request.body = options
  end
  handle_response(raw)
end
|
85
|
+
|
86
|
+
# Requests embeddings for the given input text.
#
# @param model [String] the embedding model to use (e.g. 'mistral-embed')
# @param input [String, Array<String>] text(s) to embed
# @param options [Hash] extra parameters forwarded to the API
# @return [MistralEmbeddingResponse] the embedding response wrapper
# @raise [Durable::Llm::AuthenticationError] when the API key is rejected
# @raise [Durable::Llm::RateLimitError] when the rate limit is exceeded
# @raise [Durable::Llm::InvalidRequestError] when request parameters are invalid
# @raise [Durable::Llm::ServerError] when Mistral AI reports a server fault
def embedding(model:, input:, **options)
  payload = { model: model, input: input, **options }
  raw = @conn.post('embeddings') do |request|
    request.headers['Authorization'] = "Bearer #{@api_key}"
    request.body = payload
  end
  handle_response(raw, MistralEmbeddingResponse)
end
|
104
|
+
|
105
|
+
# Lists the model identifiers available on Mistral AI.
#
# @return [Array<String>] available model ids
# @raise [Durable::Llm::AuthenticationError] when the API key is rejected
# @raise [Durable::Llm::RateLimitError] when the rate limit is exceeded
# @raise [Durable::Llm::ServerError] when Mistral AI reports a server fault
def models
  raw = @conn.get('models') do |request|
    request.headers['Authorization'] = "Bearer #{@api_key}"
  end
  handle_response(raw).data.map { |entry| entry['id'] }
end
|
118
|
+
|
119
|
+
# Whether this provider can stream responses.
#
# @return [Boolean] always true for the Mistral provider
def self.stream?
  true
end
|
125
|
+
|
126
|
+
# Performs a streaming chat completion request to Mistral AI.
#
# Yields response chunks as they arrive from the API. The caller's
# options hash is left untouched; a copy with :stream => true is sent.
#
# @param options [Hash] the stream options (same as #completion)
# @yield [MistralStreamResponse] each streaming response chunk
# @return [MistralResponse] wrapper around the final HTTP response
# @raise [Durable::Llm::AuthenticationError] when the API key is rejected
# @raise [Durable::Llm::RateLimitError] when the rate limit is exceeded
# @raise [Durable::Llm::InvalidRequestError] when request parameters are invalid
# @raise [Durable::Llm::ServerError] when Mistral AI reports a server fault
def stream(options)
  # Duplicate so enabling streaming does not mutate the caller's hash.
  payload = options.dup
  payload[:stream] = true

  # Coerce temperature to a float regardless of whether the caller used
  # a string or a symbol key (previously only the string key was handled).
  payload['temperature'] = payload['temperature'].to_f if payload['temperature']
  payload[:temperature] = payload[:temperature].to_f if payload[:temperature]

  response = @conn.post('chat/completions') do |req|
    req.headers['Authorization'] = "Bearer #{@api_key}"
    req.headers['Accept'] = 'text/event-stream'
    req.body = payload

    chunk_handler = proc do |chunk, _size, _total|
      yield MistralStreamResponse.new(chunk)
    end
    req.options.on_data = to_json_stream(user_proc: chunk_handler)
  end

  handle_response(response)
end
|
157
|
+
|
158
|
+
private
|
159
|
+
|
160
|
+
# CODE-FROM: ruby-openai @ https://github.com/alexrudall/ruby-openai/blob/main/lib/openai/http.rb
# MIT License: https://github.com/alexrudall/ruby-openai/blob/main/LICENSE.md

# Creates a proc for processing JSON streaming responses
#
# Feeds raw SSE chunks through EventStreamParser and calls user_proc with
# each JSON-decoded event payload, skipping the terminal '[DONE]' sentinel.
#
# @param user_proc [Proc] The proc to call with each parsed JSON chunk
# @return [Proc] A proc that handles raw streaming data and parses it as JSON
# @private
def to_json_stream(user_proc:)
  parser = EventStreamParser::Parser.new

  proc do |chunk, _bytes, env|
    # Non-2xx statuses on the streaming path are surfaced by replaying the
    # env (with a best-effort parsed body) through Faraday's RaiseError
    # middleware. NOTE(review): relies on Faraday env supporting #merge —
    # confirm against the Faraday version in use.
    if env && env.status != 200
      raise_error = Faraday::Response::RaiseError.new
      raise_error.on_complete(env.merge(body: try_parse_json(chunk)))
    end

    parser.feed(chunk) do |_type, data|
      user_proc.call(JSON.parse(data)) unless data == '[DONE]'
    end
  end
end
|
182
|
+
|
183
|
+
# Attempts to decode a string as JSON.
#
# @param maybe_json [String] candidate JSON text
# @return [Hash, Array, String] the decoded structure, or the input
#   unchanged when it is not valid JSON
# @private
def try_parse_json(maybe_json)
  begin
    JSON.parse(maybe_json)
  rescue JSON::ParserError
    maybe_json
  end
end
|
193
|
+
|
194
|
+
# END-CODE-FROM
|
195
|
+
|
196
|
+
# Maps an HTTP response onto a wrapper object or a typed error.
#
# @param response [Faraday::Response] the HTTP response from the API
# @param response_class [Class] wrapper class used for successful responses
# @return [Object] instance of response_class on 2xx responses
# @raise [Durable::Llm::AuthenticationError] for 401 responses
# @raise [Durable::Llm::RateLimitError] for 429 responses
# @raise [Durable::Llm::InvalidRequestError] for other 4xx responses
# @raise [Durable::Llm::ServerError] for 5xx responses
# @raise [Durable::Llm::APIError] for any other status code
# @private
def handle_response(response, response_class = MistralResponse)
  status = response.status
  # Specific statuses (401, 429) must be matched before the generic 4xx range.
  case status
  when 200..299 then response_class.new(response.body)
  when 401 then raise Durable::Llm::AuthenticationError, parse_error_message(response)
  when 429 then raise Durable::Llm::RateLimitError, parse_error_message(response)
  when 400..499 then raise Durable::Llm::InvalidRequestError, parse_error_message(response)
  when 500..599 then raise Durable::Llm::ServerError, parse_error_message(response)
  else raise Durable::Llm::APIError, "Unexpected response code: #{status}"
  end
end
|
223
|
+
|
224
|
+
# Extracts a human-readable error message from an API response.
#
# Handles both raw JSON string bodies and bodies already decoded into a
# Hash by the Faraday JSON response middleware configured in #initialize
# (previously a Hash body made JSON.parse raise, losing the structured
# error message).
#
# @param response [Faraday::Response] the HTTP response containing error information
# @return [String] message formatted as "<status> Error: <message>"
# @private
def parse_error_message(response)
  raw = response.body
  body = if raw.is_a?(Hash)
           raw
         else
           begin
             JSON.parse(raw)
           rescue StandardError
             nil
           end
         end
  message = body&.dig('error', 'message') || raw
  "#{response.status} Error: #{message}"
end
|
238
|
+
|
239
|
+
# Wrapper around a Mistral AI completion payload.
#
# Exposes the completion choices and the raw data section of the response.
class MistralResponse
  # @!attribute [r] raw_response
  #   @return [Hash] the untouched payload returned by the API
  attr_reader :raw_response

  # @param response [Hash] decoded API payload
  def initialize(response)
    @raw_response = response
  end

  # @return [Array<MistralChoice>] one wrapper per completion choice
  def choices
    @raw_response['choices'].map { |entry| MistralChoice.new(entry) }
  end

  # @return [Array] the 'data' section of the payload (used by #models)
  def data
    @raw_response['data']
  end

  # @return [String] all choice texts joined with a single space
  def to_s
    choices.map(&:to_s).join(' ')
  end
end
|
275
|
+
|
276
|
+
# A single completion choice from a Mistral AI response.
#
# Bundles the message content with the reason the completion stopped.
class MistralChoice
  # @!attribute [r] message
  #   @return [MistralMessage] the message carried by this choice
  # @!attribute [r] finish_reason
  #   @return [String] why the completion finished (e.g. 'stop', 'length')
  attr_reader :message, :finish_reason

  # @param choice [Hash] raw choice data from the API response
  def initialize(choice)
    @message = MistralMessage.new(choice['message'])
    @finish_reason = choice['finish_reason']
  end

  # @return [String] the text content of the underlying message
  def to_s
    message.to_s
  end
end
|
301
|
+
|
302
|
+
# A chat message exchanged with Mistral AI.
#
# Carries the sender role and the message text.
class MistralMessage
  # @!attribute [r] role
  #   @return [String] sender role ('user', 'assistant', or 'system')
  # @!attribute [r] content
  #   @return [String] the message text
  attr_reader :role, :content

  # @param message [Hash] raw message data from the API
  def initialize(message)
    @role = message['role']
    @content = message['content']
  end

  # @return [String] the message text
  def to_s
    content
  end
end
|
327
|
+
|
328
|
+
# Wrapper around one streaming chunk from Mistral AI.
#
# Provides access to the streaming choice carried by the chunk.
class MistralStreamResponse
  # @!attribute [r] choices
  #   @return [MistralStreamChoice] the streaming choice wrapper
  attr_reader :choices

  # @param parsed [Hash] parsed streaming chunk data
  def initialize(parsed)
    @choices = MistralStreamChoice.new(parsed['choices'])
  end

  # @return [String] text content of the chunk's choice
  def to_s
    choices.to_s
  end
end
|
350
|
+
|
351
|
+
# Wrapper around a Mistral AI embedding payload.
#
# Extracts the first embedding vector from the response data.
class MistralEmbeddingResponse
  # @!attribute [r] embedding
  #   @return [Array<Float>] the embedding vector
  attr_reader :embedding

  # @param data [Hash] raw embedding response payload
  def initialize(data)
    # Only the first entry of the 'data' array is surfaced.
    @embedding = data.dig('data', 0, 'embedding')
  end

  # @return [Array<Float>] the embedding vector
  def to_a
    embedding
  end
end
|
373
|
+
|
374
|
+
# A streaming choice extracted from a Mistral AI chunk.
#
# Holds the incremental delta and the finish reason, if any.
class MistralStreamChoice
  # @!attribute [r] delta
  #   @return [MistralStreamDelta] incremental content for this choice
  # @!attribute [r] finish_reason
  #   @return [String, nil] why the stream finished, or nil while streaming
  attr_reader :delta, :finish_reason

  # @param choice [Array<Hash>, Hash] raw streaming choice data
  def initialize(choice)
    # Normalize: the API may deliver a single choice or a one-element array.
    @choice = [choice].flatten.first
    @delta = MistralStreamDelta.new(@choice['delta'])
    @finish_reason = @choice['finish_reason']
  end

  # @return [String] the delta's text content
  def to_s
    delta.to_s
  end
end
|
400
|
+
|
401
|
+
# An incremental update within a streaming completion.
#
# Carries the optional role and the optional content fragment.
class MistralStreamDelta
  # @!attribute [r] role
  #   @return [String, nil] role for this delta, if present
  # @!attribute [r] content
  #   @return [String, nil] incremental content, if present
  attr_reader :role, :content

  # @param delta [Hash] raw delta data from the streaming response
  def initialize(delta)
    @role = delta['role']
    @content = delta['content']
  end

  # @return [String] the content fragment, or '' when the delta has none
  def to_s
    content || ''
  end
end
|
426
|
+
end
|
427
|
+
end
|
428
|
+
end
|
429
|
+
end
|
430
|
+
|
431
|
+
# Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
|