mistral_rb 0.1.1 → 0.1.3

This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 69d426e25d1c19668cbde210ee7269a98e5769cb852c5184220c99460c7c2657
- data.tar.gz: d0b6828a2313d6bdfd18e0879c8ba8b05a1559a3e4c045d0d26970a2e40fa1c9
+ metadata.gz: 2e43cdb7fa62132f8342d13ec666a4fe48571dc0dbf5b64a2476c23d4da86aa5
+ data.tar.gz: 402b62dfe6f4ca6a2b77d3e13127b673037374fe75ffbe874c4cafef2fbdaeb3
  SHA512:
- metadata.gz: 647b05e72b0699d8e6c3bf74408cdb3b8cbc633b37221f0b53e074d1a28e7bf21ff74023c921f83b6a3df8882693d726716a328644e9b9bec53fc4d8dbebc333
- data.tar.gz: ae4b88b5773aa88981220b8cf3c1c301ca262275be1f67c6d37b26e195896df099512632d405afd0ec85a4e985873e48670b9a282614b9e74a20289c7b0e70ef
+ metadata.gz: 921683a278ab7f4d9f443c2db7a0dd16dd08b81752e6d99ac49fe619fb23e7d2cd145d30e8760988786ffa81647f9d666a73d0ac3a56e6c34ce921bd61a8339e
+ data.tar.gz: cbc90300023388689b9b5572f25dec507de1cc956f90c7406cfe8fbe685011200a248bbbb7574e564b3ff0b89a9726c60890c9175ac8e4832018b4766d1dabaf
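
For reference, these digests cover the `metadata.gz` and `data.tar.gz` members inside the packaged `.gem` archive. A minimal sketch of recomputing the SHA256 values locally (the downloaded file name is assumed, not taken from the diff):

```ruby
require "digest"
require "rubygems/package"

# A .gem file is a tar archive; its checksums.yaml.gz records digests of the
# sibling metadata.gz and data.tar.gz entries, which is what the diff above shows.
File.open("mistral_rb-0.1.3.gem", "rb") do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```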
data/README.md CHANGED
@@ -24,15 +24,15 @@ Here is a quick example to get you started:
  require 'mistral_rb'
  
  # Initialize the API with your API key
- mistral = MistralAPI.new("your_api_key_here")
+ mistral = MistralAPI.new(api_key: "your_api_key_here")
  
  # Create Chat Completion
- response = mistral.create_chat_completion("mistral-tiny", [{role: "user", content: "Who is Macron?"}])
+ response = mistral.create_chat_completion(model: "mistral-tiny", messages: [{role: "user", content: "Who is Macron?"}])
  puts response.choices.to_s
  
  # Create Embeddings
- embedding_response = mistral.create_embeddings("mistral-embed", ["Hello", "world"])
- puts embedding_response.data.first.embedding
+ embedding_response = mistral.create_embeddings(model: "mistral-embed", input: ["Hello", "world"])
+ puts embedding_response.data.first.inspect
  
  # List Available Models
  model_list_response = mistral.list_available_models
@@ -41,6 +41,21 @@ model_list_response.data.each do |model|
  end
  ```
  
+ Here is how to use streaming:
+
+ ```ruby
+ api = MistralAPI.new("api_key")
+
+ api.create_chat_completion(
+ model: 'mistral-tiny',
+ messages: [{ 'role' => 'user', 'content' => 'Who is Barack Obama ?' }],
+ stream: true
+ ) do |chunk|
+ puts chunk.inspect
+ end
+
+ ```
+
  
  ## Development
  
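
Note that the streaming snippet added to the README above still constructs the client positionally (`MistralAPI.new("api_key")`), while the initializer shipped in this same release takes keyword arguments (see `data/lib/mistral_rb.rb` below). A call consistent with the new signature would presumably look like this sketch; the key string is a placeholder:

```ruby
require "mistral_rb"

# Sketch under the new keyword-argument initializer; the key is a placeholder.
api = MistralAPI.new(api_key: "your_api_key_here")

api.create_chat_completion(
  model: "mistral-tiny",
  messages: [{ "role" => "user", "content" => "Who is Barack Obama?" }],
  stream: true
) do |chunk|
  puts chunk.inspect
end
```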
data/lib/mistral_rb/response_models.rb CHANGED
@@ -71,3 +71,34 @@ class Model
  @permissions = model_hash["permission"] # This could be further parsed into Permission objects if detailed parsing is required
  end
  end
+
+ class StreamedCompletionResponse
+ attr_reader :id, :object, :created, :model, :choices
+
+ def initialize(response_hash)
+ @id = response_hash["id"]
+ @object = response_hash["object"]
+ @created = response_hash["created"]
+ @model = response_hash["model"]
+ @choices = response_hash["choices"].map { |choice| StreamedChoice.new(choice) }
+ end
+ end
+
+ class StreamedChoice
+ attr_reader :index, :delta, :finish_reason
+
+ def initialize(choice_hash)
+ @index = choice_hash["index"]
+ @delta = Delta.new(choice_hash["delta"]) if choice_hash["delta"]
+ @finish_reason = choice_hash["finish_reason"]
+ end
+ end
+
+ class Delta
+ attr_reader :role, :content
+
+ def initialize(delta_hash)
+ @role = delta_hash["role"]
+ @content = delta_hash["content"]
+ end
+ end
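
The streaming models added above are thin wrappers over the parsed chunk hash. A minimal sketch of how a decoded chunk maps onto them, with made-up illustrative values:

```ruby
require "mistral_rb" # loads response_models via require_relative

# Illustrative chunk hash; the field values here are invented for the example.
chunk_hash = {
  "id" => "cmpl-123",
  "object" => "chat.completion.chunk",
  "created" => 1_703_376_000,
  "model" => "mistral-tiny",
  "choices" => [
    { "index" => 0,
      "delta" => { "role" => "assistant", "content" => "Hello" },
      "finish_reason" => nil }
  ]
}

response = StreamedCompletionResponse.new(chunk_hash)
delta = response.choices.first.delta
puts delta.content # => "Hello"
```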
data/lib/mistral_rb/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
  
  module MistralRb
- VERSION = "0.1.1"
+ VERSION = "0.1.3"
  end
data/lib/mistral_rb.rb CHANGED
@@ -8,7 +8,7 @@ require_relative "mistral_rb/response_models"
  class MistralAPI
  include HTTParty
  
- def initialize(api_key, base_uri = "https://api.mistral.ai/v1")
+ def initialize(api_key:, base_uri: "https://api.mistral.ai/v1")
  @headers = {
  "Authorization" => "Bearer #{api_key}",
  "Content-Type" => "application/json"
@@ -16,7 +16,7 @@ class MistralAPI
  self.class.base_uri base_uri
  end
  
- def create_chat_completion(model, messages, temperature = 0.7, top_p = 1, max_tokens = nil, stream = false, safe_mode = false, random_seed = nil)
+ def create_chat_completion(model:, messages:, temperature: 0.7, top_p: 1, max_tokens: nil, stream: false, safe_mode: false, random_seed: nil)
  body = {
  model: model,
  messages: messages,
@@ -26,14 +26,23 @@ class MistralAPI
  stream: stream,
  safe_mode: safe_mode,
  random_seed: random_seed
- }.compact.to_json # compact to remove nil values
+ }.compact.to_json
  
- response = self.class.post("/chat/completions", body: body, headers: @headers)
- parsed_response = handle_response(response)
- CompletionResponse.new(parsed_response)
+ if stream
+ # Use on_data callback for streaming
+ self.class.post("/chat/completions", body: body, headers: @headers, stream_body: true) do |fragment, _, _|
+ processed_chunk = handle_stream_chunk(fragment)
+ yield(processed_chunk) if block_given? && processed_chunk
+ end
+ else
+ # Handle non-streaming response
+ response = self.class.post("/chat/completions", body: body, headers: @headers)
+ parsed_response = handle_response(response)
+ CompletionResponse.new(parsed_response)
+ end
  end
  
- def create_embeddings(model, input, encoding_format = "float")
+ def create_embeddings(model:, input:, encoding_format: "float")
  body = {
  model: model,
  input: input,
@@ -60,4 +69,20 @@ class MistralAPI
  raise "API Error: #{response.code} - #{response.body}"
  end
  end
+
+ def handle_stream_chunk(chunk)
+ # Skip processing if the chunk indicates the end of the stream.
+ return nil if chunk.strip == "data: [DONE]"
+
+ if chunk.strip.start_with?("data:")
+ data_content = chunk.split("data:").last.strip
+ begin
+ # Only parse the JSON content if it's not the end-of-stream indicator
+ json_content = JSON.parse(data_content)
+ StreamedCompletionResponse.new(json_content)
+ rescue JSON::ParserError => e
+ puts "Error parsing JSON: #{e.message}"
+ end
+ end
+ end
  end
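
Taken together, when `stream: true` is passed, `create_chat_completion` yields parsed `StreamedCompletionResponse` objects to the caller's block, with the `data: [DONE]` sentinel filtered out by `handle_stream_chunk`. A minimal consumer sketch, assuming the key is read from a `MISTRAL_API_KEY` environment variable (the variable name is an assumption, not part of the gem):

```ruby
require "mistral_rb"

client = MistralAPI.new(api_key: ENV["MISTRAL_API_KEY"])

answer = ""
client.create_chat_completion(
  model: "mistral-tiny",
  messages: [{ role: "user", content: "Who is Macron?" }],
  stream: true
) do |chunk|
  # Each yielded chunk is a StreamedCompletionResponse; delta can be nil on the
  # final chunk that only carries a finish_reason.
  delta = chunk.choices.first.delta
  answer << delta.content if delta && delta.content
end

puts answer
```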
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: mistral_rb
  version: !ruby/object:Gem::Version
- version: 0.1.1
+ version: 0.1.3
  platform: ruby
  authors:
  - Franck Stephane Ndzomga
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-12-22 00:00:00.000000000 Z
+ date: 2023-12-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: httparty