omniai-mistral 1.6.2 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +15 -6
- data/lib/omniai/mistral/chat.rb +2 -2
- data/lib/omniai/mistral/client.rb +8 -0
- data/lib/omniai/mistral/embed.rb +38 -0
- data/lib/omniai/mistral/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ed42e145797e63aa3725225494e85cbf842e5ee7720e0b703ae5d129e63d6312
+  data.tar.gz: a1169acf35660a06119ff591704ca6da9f94f25675dc97c2e5d26cb6060ddb71
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 04373ce5f992a56861931f2a1d1658d4e2e53b9f26706b1d32a19b25773c0a78659ba2f349723eb5e21bb099652874f84939146a227fe906de953b4014a4bcef
+  data.tar.gz: 06b52cea6dd021cc9c7ffb2cac03eca346f78e81f54eff8d63b059bc9f6c6eba7eea45a3216ec4d7e42ad72dcf1f0b442dfc0cfc91cb3cf2590322acde55196d
data/README.md
CHANGED
@@ -42,7 +42,7 @@ A chat completion is generated by passing in prompts using any a variety of format
 
 ```ruby
 completion = client.chat('Tell me a joke!')
-completion.
+completion.text # 'Why did the chicken cross the road? To get to the other side.'
 ```
 
 ```ruby
@@ -50,7 +50,7 @@ completion = client.chat do |prompt|
   prompt.system('You are a helpful assistant.')
   prompt.user('What is the capital of Canada?')
 end
-completion.
+completion.text # 'The capital of Canada is Ottawa.'
 ```
 
 #### Model
@@ -59,7 +59,7 @@ completion.choice.message.content # 'The capital of Canada is Ottawa.'
 
 ```ruby
 completion = client.chat('Provide code for fibonacci', model: OmniAI::Mistral::Chat::Model::CODESTRAL)
-completion.
+completion.text # 'def fibonacci(n)...end'
 ```
 
 [Mistral API Reference `model`](https://docs.mistral.ai/getting-started/models/)
@@ -70,7 +70,7 @@ completion.choice.message.content # 'def fibonacci(n)...end'
 
 ```ruby
 completion = client.chat('Pick a number between 1 and 5', temperature: 1.0)
-completion.
+completion.text # '3'
 ```
 
 [Mistral API Reference `temperature`](https://docs.mistral.ai/api/)
@@ -81,7 +81,7 @@ completion.choice.message.content # '3'
 
 ```ruby
 stream = proc do |chunk|
-  print(chunk.
+  print(chunk.text) # 'Better', 'three', 'hours', ...
 end
 client.chat('Be poetic.', stream:)
 ```
@@ -97,9 +97,18 @@ completion = client.chat(format: :json) do |prompt|
   prompt.system(OmniAI::Chat::JSON_PROMPT)
   prompt.user('What is the name of the drummer for the Beatles?')
 end
-JSON.parse(completion.
+JSON.parse(completion.text) # { "name": "Ringo" }
 ```
 
 [Mistral API Reference `response_format`](https://docs.mistral.ai/api/)
 
 > When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message.
+
+### Embed
+
+Text can be converted into a vector embedding for similarity comparison usage via:
+
+```ruby
+response = client.embed('The quick brown fox jumps over a lazy dog.')
+response.embedding # [0.0, ...]
+```
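The new Embed section above mentions similarity comparison. As an illustrative sketch only (not part of the gem), a cosine similarity over two `client.embed` results can be computed in plain Ruby; the `client` object is assumed to be configured as in the earlier README examples:

```ruby
# Illustrative sketch only: cosine similarity between two embeddings returned
# by `client.embed`. Assumes `client` is an already-configured Mistral client.
def cosine_similarity(a, b)
  dot = a.zip(b).sum { |x, y| x * y }
  dot / (Math.sqrt(a.sum { |x| x * x }) * Math.sqrt(b.sum { |x| x * x }))
end

a = client.embed('The quick brown fox jumps over a lazy dog.').embedding
b = client.embed('A fast auburn fox leaps over a sleepy dog.').embedding
cosine_similarity(a, b) # => closer to 1.0 the more similar the sentences are
```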
data/lib/omniai/mistral/chat.rb
CHANGED
@@ -19,7 +19,7 @@ module OmniAI
         CODESTRAL = 'codestral-latest'
       end
 
-      DEFAULT_MODEL = Model::
+      DEFAULT_MODEL = Model::LARGE
 
       module Role
         ASSISTANT = 'assistant'
@@ -39,7 +39,7 @@ module OmniAI
           stream: @stream.nil? ? nil : !@stream.nil?,
           temperature: @temperature,
           response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
-          tools: @tools&.map(&:
+          tools: @tools&.map(&:serialize),
         }).compact
       end
 
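For context on the `tools:` change above, here is a hypothetical sketch of the serialize-and-compact payload pattern (standalone Ruby, not the gem's actual classes; the `Tool` struct and its serialized shape are assumptions for illustration):

```ruby
# Hypothetical sketch of the payload pattern used above: each tool responds to
# #serialize and nil options are dropped by #compact. Not the gem's real code.
Tool = Struct.new(:name, :description) do
  def serialize
    { type: 'function', function: { name: name, description: description } }
  end
end

tools = [Tool.new('weather', 'Looks up the weather for a location')]

payload = {
  temperature: nil,               # removed by #compact when nil
  tools: tools&.map(&:serialize), # same shape as `@tools&.map(&:serialize)`
}.compact
payload # => { tools: [{ type: "function", function: { ... } }] }
```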
data/lib/omniai/mistral/client.rb
CHANGED
@@ -58,6 +58,14 @@ module OmniAI
       def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil, &)
         Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self, &)
       end
+
+      # @raise [OmniAI::Error]
+      #
+      # @param input [String, Array<String>, Array<Integer>] required
+      # @param model [String] optional
+      def embed(input, model: Embed::DEFAULT_MODEL)
+        Embed.process!(input, model:, client: self)
+      end
     end
   end
 end
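A hedged usage sketch of the new `Client#embed` method added above; passing `api_key:` to the constructor and reading it from a `MISTRAL_API_KEY` environment variable are assumptions, not something this diff documents:

```ruby
# Usage sketch for the new Client#embed method. The api_key: argument and the
# MISTRAL_API_KEY variable name are assumptions; configure the key however the
# gem's README documents.
require 'omniai/mistral'

client = OmniAI::Mistral::Client.new(api_key: ENV.fetch('MISTRAL_API_KEY'))

response = client.embed('The quick brown fox jumps over a lazy dog.')
response.embedding # => [0.0, ...]

# Per the @param docs above, an array of strings is also accepted.
client.embed(['first sentence', 'second sentence'])
```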
data/lib/omniai/mistral/embed.rb
ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Mistral
+    # An Mistral embed implementation.
+    #
+    # Usage:
+    #
+    #   input = "..."
+    #   response = OmniAI::Mistral::Embed.process!(input, client: client)
+    #   response.embedding [0.0, ...]
+    class Embed < OmniAI::Embed
+      module Model
+        EMBED = 'mistral-embed'
+      end
+
+      DEFAULT_MODEL = Model::EMBED
+
+      protected
+
+      # @return [Hash]
+      def payload
+        { model: @model, input: arrayify(@input) }
+      end
+
+      # @return [String]
+      def path
+        "/#{OmniAI::Mistral::Client::VERSION}/embeddings"
+      end
+
+      # @param [Object] value
+      # @return [Array]
+      def arrayify(value)
+        value.is_a?(Array) ? value : [value]
+      end
+    end
+  end
+end
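The `arrayify` helper above normalizes single and batch inputs into the array the embeddings payload expects; a standalone sketch of that behavior (the `MODEL` constant stands in for `Model::EMBED`):

```ruby
# Standalone illustration of the arrayify and payload logic from embed.rb
# above; MODEL mirrors Model::EMBED.
MODEL = 'mistral-embed'

def arrayify(value)
  value.is_a?(Array) ? value : [value]
end

def payload(input, model: MODEL)
  { model: model, input: arrayify(input) }
end

payload('The quick brown fox jumps over a lazy dog.')
# => { model: "mistral-embed", input: ["The quick brown fox jumps over a lazy dog."] }

payload(%w[first second])
# => { model: "mistral-embed", input: ["first", "second"] }
```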
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: omniai-mistral
 version: !ruby/object:Gem::Version
-  version: 1.6.2
+  version: 1.8.0
 platform: ruby
 authors:
 - Kevin Sylvestre
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-
+date: 2024-08-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: event_stream_parser
@@ -65,6 +65,7 @@ files:
 - lib/omniai/mistral/chat.rb
 - lib/omniai/mistral/client.rb
 - lib/omniai/mistral/config.rb
+- lib/omniai/mistral/embed.rb
 - lib/omniai/mistral/version.rb
 homepage: https://github.com/ksylvest/omniai-mistral
 licenses: