langchainrb 0.10.0 → 0.10.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/lib/langchain/llm/mistral_ai.rb +68 -0
- data/lib/langchain/llm/response/mistral_ai_response.rb +39 -0
- data/lib/langchain/tool/ruby_code_interpreter/ruby_code_interpreter.rb +30 -34
- data/lib/langchain/version.rb +1 -1
- data/lib/langchain.rb +7 -0
- metadata +19 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 19b23746583868d1a5eca30d5e8f30bf548cfc253d8cf20fa13c27bb4e03b967
|
4
|
+
data.tar.gz: 6de6bea0a348b812d09745af6f14c03def1f8d7a4fb7a246ab328de4992b9e8e
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 12e39b5c0c523d7ea798f4fc0729fc53f0e0754db43261ec9c464369a4a01fea18ef7c30bee9d26b45a08add746cd45b0550f1a741dda0f84ae4cd72be9481d1
|
7
|
+
data.tar.gz: abf235f4d1dffd76d4a73a45a8e3e3af81ed358eadb43189ccef6cd8d3ff7739c8e1fcb38287c786dd56de6edf8944efc16270936cd6d4e4763e2ec4f7c84eea
|
data/CHANGELOG.md
CHANGED
@@ -0,0 +1,68 @@
|
|
1
|
+
# frozen_string_literal: true

module Langchain::LLM
  # Wrapper around the Mistral AI chat-completion and embeddings APIs.
  #
  # Gem requirements:
  #     gem "mistral-ai"
  #
  # Usage:
  #     llm = Langchain::LLM::MistralAI.new(api_key: ENV["MISTRAL_AI_API_KEY"])
  class MistralAI < Base
    DEFAULTS = {
      chat_completion_model_name: "mistral-medium",
      embeddings_model_name: "mistral-embed"
    }.freeze

    attr_reader :defaults

    # @param api_key [String] Mistral AI API key
    # @param default_options [Hash] per-instance overrides for DEFAULTS
    #   (+:chat_completion_model_name+, +:embeddings_model_name+)
    def initialize(api_key:, default_options: {})
      depends_on "mistral-ai"

      @client = Mistral.new(
        # Fix: use the injected api_key; the previous code read
        # ENV["MISTRAL_AI_API_KEY"] directly, silently discarding the argument.
        credentials: {api_key: api_key},
        options: {server_sent_events: true}
      )

      @defaults = DEFAULTS.merge(default_options)
    end

    # Generates a chat completion.
    #
    # @param messages [Array<Hash>] messages in {role:, content:} form
    # @param model [String] model name (defaults to configured chat model)
    # @param temperature [Float, nil] sampling temperature
    # @param top_p [Float, nil] nucleus-sampling cutoff
    # @param max_tokens [Integer, nil] completion length cap
    # @param safe_prompt [Boolean, nil] prepend Mistral's safety prompt
    # @param random_seed [Integer, nil] seed for deterministic sampling
    # @return [Langchain::LLM::MistralAIResponse]
    def chat(
      messages:,
      model: defaults[:chat_completion_model_name],
      temperature: nil,
      top_p: nil,
      max_tokens: nil,
      safe_prompt: nil,
      random_seed: nil
    )
      params = {
        messages: messages,
        model: model
      }
      # Forward only the optional tuning params the caller actually set,
      # so the API's own defaults apply otherwise.
      params[:temperature] = temperature if temperature
      params[:top_p] = top_p if top_p
      params[:max_tokens] = max_tokens if max_tokens
      params[:safe_prompt] = safe_prompt if safe_prompt
      params[:random_seed] = random_seed if random_seed

      response = client.chat_completions(params)

      Langchain::LLM::MistralAIResponse.new(response.to_h)
    end

    # Generates an embedding for the given text.
    #
    # @param text [String] text to embed
    # @param model [String] model name (defaults to configured embeddings model)
    # @param encoding_format [String, nil] requested encoding of the vector
    # @return [Langchain::LLM::MistralAIResponse]
    def embed(
      text:,
      model: defaults[:embeddings_model_name],
      encoding_format: nil
    )
      params = {
        input: text,
        model: model
      }
      params[:encoding_format] = encoding_format if encoding_format

      response = client.embeddings(params)

      Langchain::LLM::MistralAIResponse.new(response.to_h)
    end
  end
end
|
@@ -0,0 +1,39 @@
|
|
1
|
+
# frozen_string_literal: true

module Langchain::LLM
  # Thin accessor wrapper over the raw response hash returned by the
  # mistral-ai gem for both chat-completion and embeddings calls.
  class MistralAIResponse < BaseResponse
    # @return [String] the model that produced this response
    def model
      raw_response["model"]
    end

    # @return [String, nil] content of the first choice's message
    def chat_completion
      raw_response.dig("choices", 0, "message", "content")
    end

    # @return [String, nil] role of the first choice's message
    def role
      raw_response.dig("choices", 0, "message", "role")
    end

    # @return [Array<Float>, nil] the first embedding vector
    def embedding
      raw_response.dig("data", 0, "embedding")
    end

    # @return [Integer, nil] tokens consumed by the prompt
    def prompt_tokens
      raw_response.dig("usage", "prompt_tokens")
    end

    # @return [Integer, nil] total tokens billed for the call
    def total_tokens
      raw_response.dig("usage", "total_tokens")
    end

    # @return [Integer, nil] tokens produced by the completion
    def completion_tokens
      raw_response.dig("usage", "completion_tokens")
    end

    # @return [Time, nil] creation time, when the response carries a
    #   "created_at" epoch timestamp
    def created_at
      timestamp = raw_response["created_at"]
      Time.at(timestamp) if timestamp
    end
  end
end
|
@@ -1,45 +1,41 @@
|
|
1
1
|
# frozen_string_literal: true

module Langchain::Tool
  class RubyCodeInterpreter < Base
    #
    # A tool that executes Ruby code in a sandboxed environment.
    #
    # Gem requirements:
    #     gem "safe_ruby", "~> 1.0.4"
    #
    # Usage:
    #     interpreter = Langchain::Tool::RubyCodeInterpreter.new
    #
    NAME = "ruby_code_interpreter"
    ANNOTATIONS_PATH = Langchain.root.join("./langchain/tool/#{NAME}/#{NAME}.json").to_path

    description <<~DESC
      A Ruby code interpreter. Use this to execute ruby expressions. Input should be a valid ruby expression. If you want to see the output of the tool, make sure to return a value.
    DESC

    # @param timeout [Integer] maximum seconds the evaluated code may run
    def initialize(timeout: 30)
      depends_on "safe_ruby"

      @timeout = timeout
    end

    # Executes Ruby code in a sandboxed environment.
    #
    # @param input [String] ruby code expression
    # @return [String] Answer
    def execute(input:)
      Langchain.logger.info("Executing \"#{input}\"", for: self.class)

      safe_eval(input)
    end

    # Evaluates +code+ via SafeRuby with the configured timeout.
    def safe_eval(code)
      SafeRuby.eval(code, timeout: @timeout)
    end
  end
end
|
data/lib/langchain/version.rb
CHANGED
data/lib/langchain.rb
CHANGED
@@ -16,6 +16,8 @@ loader.inflector.inflect(
|
|
16
16
|
"json" => "JSON",
|
17
17
|
"jsonl" => "JSONL",
|
18
18
|
"llm" => "LLM",
|
19
|
+
"mistral_ai" => "MistralAI",
|
20
|
+
"mistral_ai_response" => "MistralAIResponse",
|
19
21
|
"openai" => "OpenAI",
|
20
22
|
"openai_validator" => "OpenAIValidator",
|
21
23
|
"openai_response" => "OpenAIResponse",
|
@@ -32,6 +34,11 @@ loader.collapse("#{__dir__}/langchain/tool/google_search")
|
|
32
34
|
loader.collapse("#{__dir__}/langchain/tool/ruby_code_interpreter")
|
33
35
|
loader.collapse("#{__dir__}/langchain/tool/weather")
|
34
36
|
loader.collapse("#{__dir__}/langchain/tool/wikipedia")
|
37
|
+
|
38
|
+
# RubyCodeInterpreter does not work with Ruby 3.3;
|
39
|
+
# https://github.com/ukutaht/safe_ruby/issues/4
|
40
|
+
loader.ignore("#{__dir__}/langchain/tool/ruby_code_interpreter") if RUBY_VERSION >= "3.3.0"
|
41
|
+
|
35
42
|
loader.setup
|
36
43
|
|
37
44
|
# Langchain.rb a is library for building LLM-backed Ruby applications. It is an abstraction layer that sits on top of the emerging AI-related tools that makes it easy for developers to consume and string those services together.
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: langchainrb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.10.0
|
4
|
+
version: 0.10.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Andrei Bondarev
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-03-
|
11
|
+
date: 2024-03-21 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: activesupport
|
@@ -472,6 +472,20 @@ dependencies:
|
|
472
472
|
- - "~>"
|
473
473
|
- !ruby/object:Gem::Version
|
474
474
|
version: '2.8'
|
475
|
+
- !ruby/object:Gem::Dependency
|
476
|
+
name: mistral-ai
|
477
|
+
requirement: !ruby/object:Gem::Requirement
|
478
|
+
requirements:
|
479
|
+
- - ">="
|
480
|
+
- !ruby/object:Gem::Version
|
481
|
+
version: '0'
|
482
|
+
type: :development
|
483
|
+
prerelease: false
|
484
|
+
version_requirements: !ruby/object:Gem::Requirement
|
485
|
+
requirements:
|
486
|
+
- - ">="
|
487
|
+
- !ruby/object:Gem::Version
|
488
|
+
version: '0'
|
475
489
|
- !ruby/object:Gem::Dependency
|
476
490
|
name: open-weather-ruby-client
|
477
491
|
requirement: !ruby/object:Gem::Requirement
|
@@ -718,6 +732,7 @@ files:
|
|
718
732
|
- lib/langchain/llm/google_vertex_ai.rb
|
719
733
|
- lib/langchain/llm/hugging_face.rb
|
720
734
|
- lib/langchain/llm/llama_cpp.rb
|
735
|
+
- lib/langchain/llm/mistral_ai.rb
|
721
736
|
- lib/langchain/llm/ollama.rb
|
722
737
|
- lib/langchain/llm/openai.rb
|
723
738
|
- lib/langchain/llm/prompts/ollama/summarize_template.yaml
|
@@ -732,6 +747,7 @@ files:
|
|
732
747
|
- lib/langchain/llm/response/google_vertex_ai_response.rb
|
733
748
|
- lib/langchain/llm/response/hugging_face_response.rb
|
734
749
|
- lib/langchain/llm/response/llama_cpp_response.rb
|
750
|
+
- lib/langchain/llm/response/mistral_ai_response.rb
|
735
751
|
- lib/langchain/llm/response/ollama_response.rb
|
736
752
|
- lib/langchain/llm/response/openai_response.rb
|
737
753
|
- lib/langchain/llm/response/replicate_response.rb
|
@@ -806,7 +822,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
806
822
|
requirements:
|
807
823
|
- - ">="
|
808
824
|
- !ruby/object:Gem::Version
|
809
|
-
version: 3.
|
825
|
+
version: 3.1.0
|
810
826
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
811
827
|
requirements:
|
812
828
|
- - ">="
|