langchainrb 0.3.3 → 0.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/Gemfile.lock +12 -9
- data/README.md +14 -5
- data/examples/store_and_query_with_pinecone.rb +3 -0
- data/examples/store_and_query_with_qdrant.rb +3 -0
- data/examples/store_and_query_with_weaviate.rb +3 -0
- data/lib/agent/chain_of_thought_agent/chain_of_thought_agent.rb +0 -1
- data/lib/langchain.rb +1 -1
- data/lib/llm/hugging_face.rb +32 -0
- data/lib/llm/openai.rb +3 -2
- data/lib/logging.rb +1 -1
- data/lib/prompt/base.rb +1 -0
- data/lib/tool/base.rb +9 -2
- data/lib/tool/calculator.rb +6 -3
- data/lib/tool/serp_api.rb +15 -7
- data/lib/tool/wikipedia.rb +6 -3
- data/lib/vectorsearch/base.rb +2 -0
- data/lib/vectorsearch/milvus.rb +8 -20
- data/lib/vectorsearch/pinecone.rb +6 -16
- data/lib/vectorsearch/qdrant.rb +5 -12
- data/lib/vectorsearch/weaviate.rb +8 -23
- data/lib/version.rb +1 -1
- metadata +45 -30
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a5a782dd2282ab5dd4aed3f1d0e421a4f9b227fa4c5450ed27f2f98a86af74f4
+  data.tar.gz: b47bb5d6789d7abb81f56ee1beb0b52323184f578475aec3e92fcc19b4a1314a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4e918b49b2b04a0e7009db732a5b24ab080a0d8d6b4c3be4084aa3a1492ddd6c1627d467b8bff4b34e1e5160b71622a75772ef92658dce2589b9542b8b0a8137
+  data.tar.gz: 3b465f1a05e614d64d416582aeaf7d998bf51422705331b220e08b00774d4beed4dcbc4e2a71b81bad87cddfe8c8f4c8db421d650e7161f16b26a1259922f71c
data/CHANGELOG.md
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,15 +1,7 @@
 PATH
   remote: .
   specs:
-    langchainrb (0.3.3)
-      eqn (~> 1.6.5)
-      google_search_results (~> 2.0.0)
-      milvus (~> 0.9.0)
-      pinecone (~> 0.1.6)
-      qdrant-ruby (~> 0.9.0)
-      ruby-openai (~> 4.0.0)
-      weaviate-ruby (~> 0.8.0)
-      wikipedia-client (~> 1.17.0)
+    langchainrb (0.3.4)
 
 GEM
   remote: https://rubygems.org/
@@ -125,6 +117,8 @@ GEM
     httparty (0.21.0)
       mini_mime (>= 1.0.0)
       multi_xml (>= 0.5.2)
+    hugging-face (0.3.2)
+      faraday (~> 1.0)
     i18n (1.13.0)
       concurrent-ruby (~> 1.0)
     ice_nine (0.11.2)
@@ -253,11 +247,20 @@ PLATFORMS
 DEPENDENCIES
   cohere-ruby (~> 0.9.3)
   dotenv-rails (~> 2.7.6)
+  eqn (~> 1.6.5)
+  google_search_results (~> 2.0.0)
+  hugging-face (~> 0.3.2)
   langchainrb!
+  milvus (~> 0.9.0)
+  pinecone (~> 0.1.6)
   pry-byebug (~> 3.10.0)
+  qdrant-ruby (~> 0.9.0)
   rake (~> 13.0)
   rspec (~> 3.0)
+  ruby-openai (~> 4.0.0)
   standardrb
+  weaviate-ruby (~> 0.8.0)
+  wikipedia-client (~> 1.17.0)
 
 BUNDLED WITH
    2.4.0
data/README.md
CHANGED
@@ -1,5 +1,5 @@
 🦜️🔗 LangChain.rb
----
+---
 ⚡ Building applications with LLMs through composability ⚡
 
 👨💻👩💻 CURRENTLY SEEKING PEOPLE TO FORM THE CORE GROUP OF MAINTAINERS WITH
@@ -39,6 +39,8 @@ require "langchain"
 
 Choose the LLM provider you'll be using (OpenAI or Cohere) and retrieve the API key.
 
+Add `gem "weaviate-ruby", "~> 0.8.0"` to your Gemfile.
+
 Pick the vector search database you'll be using and instantiate the client:
 ```ruby
 client = Vectorsearch::Weaviate.new(
@@ -49,9 +51,9 @@ client = Vectorsearch::Weaviate.new(
 )
 
 # You can instantiate any other supported vector search database:
-client = Vectorsearch::Milvus.new(...)
-client = Vectorsearch::Qdrant.new(...)
-client = Vectorsearch::Pinecone.new(...)
+client = Vectorsearch::Milvus.new(...) # `gem "milvus", "~> 0.9.0"`
+client = Vectorsearch::Qdrant.new(...) # `gem"qdrant-ruby", "~> 0.9.0"`
+client = Vectorsearch::Pinecone.new(...) # `gem "pinecone", "~> 0.1.6"`
 ```
 
 ```ruby
@@ -92,6 +94,8 @@ client.ask(
 
 ### Using Standalone LLMs 🗣️
 
+Add `gem "ruby-openai", "~> 4.0.0"` to your Gemfile.
+
 #### OpenAI
 ```ruby
 openai = LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
@@ -116,6 +120,9 @@ cohere.embed(text: "foo bar")
 cohere.complete(prompt: "What is the meaning of life?")
 ```
 
+#### HuggingFace
+Add `gem "hugging-face", "~> 0.3.2"` to your Gemfile.
+
 ### Using Prompts 📋
 
 #### Prompt Templates
@@ -206,6 +213,8 @@ Agents are semi-autonomous bots that can respond to user questions and use avail
 
 #### Chain-of-Thought Agent
 
+Add `gem "openai-ruby"`, `gem "eqn"`, and `gem "google_search_results"` to your Gemfile
+
 ```ruby
 agent = Agent::ChainOfThoughtAgent.new(llm: :openai, llm_api_key: ENV["OPENAI_API_KEY"], tools: ['search', 'calculator'])
 
@@ -237,7 +246,7 @@ LangChain.rb uses standard logging mechanisms and defaults to `:debug` level. Mo
 To show all log messages:
 
 ```ruby
-
+Langchain.logger.level = :info
 ```
 
 ## Development
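The README additions above all point the same way: provider gems are no longer pulled in automatically, so consumers declare them in their own Gemfile. A hypothetical Gemfile for a Weaviate + OpenAI setup, using the version constraints quoted in the README (the exact set of gems depends on which providers you actually use):

```ruby
# Gemfile (illustrative)
source "https://rubygems.org"

gem "langchainrb", "~> 0.3.4"

# Optional provider gems -- add only the ones your code touches.
gem "weaviate-ruby", "~> 0.8.0" # Vectorsearch::Weaviate
gem "ruby-openai", "~> 4.0.0"   # LLM::OpenAI
```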
data/lib/langchain.rb
CHANGED
@@ -20,6 +20,7 @@ end
 module LLM
   autoload :Base, "llm/base"
   autoload :Cohere, "llm/cohere"
+  autoload :HuggingFace, "llm/hugging_face"
   autoload :OpenAI, "llm/openai"
 end
 
@@ -34,7 +35,6 @@ end
 module Tool
   autoload :Base, "tool/base"
   autoload :Calculator, "tool/calculator"
-  autoload :News, "tool/news"
   autoload :SerpApi, "tool/serp_api"
   autoload :Wikipedia, "tool/wikipedia"
 end
data/lib/llm/hugging_face.rb
ADDED
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module LLM
+  class HuggingFace < Base
+    # The gem does not currently accept other models:
+    # https://github.com/alchaplinsky/hugging-face/blob/main/lib/hugging_face/inference_api.rb#L32-L34
+    DEFAULTS = {
+      embeddings_model_name: "sentence-transformers/all-MiniLM-L6-v2"
+    }.freeze
+
+    #
+    # Intialize the HuggingFace LLM
+    # @param api_key [String] The API key to use
+    #
+    def initialize(api_key:)
+      depends_on "hugging-face"
+      require "hugging_face"
+
+      @client = ::HuggingFace::InferenceApi.new(api_token: api_key)
+    end
+
+    # Generate an embedding for a given text
+    # @param text [String] The text to embed
+    # @return [Array] The embedding
+    def embed(text:)
+      response = client.embedding(
+        input: text,
+        model: DEFAULTS[:embeddings_model_name]
+      )
+    end
+  end
+end
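The new `LLM::HuggingFace` class only wraps the embeddings endpoint for now. A usage sketch, assuming the `hugging-face` gem is in your bundle and the API token lives in an environment variable of your choosing (the variable name below is illustrative):

```ruby
require "langchain"

# Instantiation triggers `depends_on "hugging-face"`, so a missing gem fails fast.
hugging_face = LLM::HuggingFace.new(api_key: ENV["HUGGING_FACE_API_TOKEN"])

# Returns the embedding produced by sentence-transformers/all-MiniLM-L6-v2.
hugging_face.embed(text: "foo bar")
```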
data/lib/llm/openai.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "openai"
-
 module LLM
   class OpenAI < Base
     DEFAULTS = {
@@ -12,6 +10,9 @@ module LLM
     }.freeze
 
     def initialize(api_key:)
+      depends_on "ruby-openai"
+      require "openai"
+
       # TODO: Add support to pass `organization_id:`
       @client = ::OpenAI::Client.new(access_token: api_key)
     end
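The same `depends_on`/`require` pair now guards every optional integration, but the helper itself is not part of this diff. A plausible sketch of what such a guard typically does (this is an assumption, not the gem's actual implementation):

```ruby
# Hypothetical helper -- not taken from the langchainrb source.
def depends_on(gem_name)
  gem(gem_name) # Kernel#gem raises Gem::LoadError if the gem is not available
  true
rescue Gem::LoadError
  raise LoadError, "Could not load #{gem_name}. Please add `gem \"#{gem_name}\"` to your Gemfile."
end
```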
data/lib/logging.rb
CHANGED
@@ -4,7 +4,7 @@ require "logger"
 
 module Langchain
   def self.logger
-    @@logger ||= Logger.new(
+    @@logger ||= Logger.new($stdout, level: :warn, formatter: ->(severity, datetime, progname, msg) { "[LangChain.rb] #{msg}\n" })
  end
 
   def self.logger=(instance)
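Since `Langchain.logger=` remains available, callers who prefer a different level or format can swap in their own logger; for example:

```ruby
require "logger"

# Replace the default "[LangChain.rb]" logger with a plain debug-level logger.
Langchain.logger = Logger.new($stdout, level: :debug)
```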
data/lib/prompt/base.rb
CHANGED
data/lib/tool/base.rb
CHANGED
@@ -20,11 +20,18 @@ module Tool
       const_set(:DESCRIPTION, value.tr("\n", " ").strip)
     end
 
-    #
+    # Instantiates and executes the tool and returns the answer
     # @param input [String] input to the tool
     # @return [String] answer
     def self.execute(input:)
-
+      new.execute(input: input)
+    end
+
+    # Executes the tool and returns the answer
+    # @param input [String] input to the tool
+    # @return [String] answer
+    def execute(input:)
+      raise NotImplementedError, "Your tool must implement the `#execute(input:)` method that returns a string"
     end
 
     #
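Under the new contract, `Tool::Base.execute` simply instantiates the tool and calls `#execute`, so custom tools override the instance method. A minimal illustrative subclass (the `Echo` tool below is made up for this example):

```ruby
module Tool
  class Echo < Base
    description "Useful for echoing the input back to the caller. Input should be any string."

    # Instance-level execute, as required by the new Tool::Base contract.
    def execute(input:)
      "You said: #{input}"
    end
  end
end

Tool::Echo.execute(input: "hello") # delegates to Echo.new.execute(input: "hello")
```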
data/lib/tool/calculator.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "eqn"
-
 module Tool
   class Calculator < Base
     description <<~DESC
@@ -10,11 +8,16 @@ module Tool
       The input to this tool should be a valid mathematical expression that could be executed by a simple calculator.
     DESC
 
+    def initialize
+      depends_on "eqn"
+      require "eqn"
+    end
+
     # Evaluates a pure math expression or if equation contains non-math characters (e.g.: "12F in Celsius") then
     # it uses the google search calculator to evaluate the expression
     # @param input [String] math expression
    # @return [String] Answer
-    def
+    def execute(input:)
       Eqn::Calculator.calc(input)
     rescue Eqn::ParseError, Eqn::NoVariableValueError
       # Sometimes the input is not a pure math expression, e.g: "12F in Celsius"
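Because the class-level `execute` now builds an instance first, calling the calculator also runs the `eqn` dependency check; a quick usage sketch (assuming `gem "eqn"` is in your Gemfile):

```ruby
require "langchain"

# Raises at initialization time if the `eqn` gem is missing.
Tool::Calculator.execute(input: "(2 + 2) * 10") # evaluated by Eqn::Calculator.calc
```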
data/lib/tool/serp_api.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "google_search_results"
-
 module Tool
   class SerpApi < Base
     # Wrapper around SerpAPI
@@ -16,12 +14,25 @@ module Tool
       Input should be a search query.
     DESC
 
+    def initialize
+      depends_on "google_search_results"
+      require "google_search_results"
+    end
+
+    # Executes Google Search and returns hash_results JSON
+    # @param input [String] search query
+    # @return [Hash] hash_results JSON
+
+    def self.execute_search(input:)
+      new.execute_search(input: input)
+    end
+
     # Executes Google Search and returns hash_results JSON
     # @param input [String] search query
     # @return [String] Answer
     # TODO: Glance at all of the fields that langchain Python looks through: https://github.com/hwchase17/langchain/blob/v0.0.166/langchain/utilities/serpapi.py#L128-L156
     # We may need to do the same thing here.
-    def
+    def execute(input:)
       hash_results = execute_search(input: input)
 
       hash_results.dig(:answer_box, :answer) ||
@@ -29,10 +40,7 @@ module Tool
         hash_results.dig(:organic_results, 0, :snippet)
     end
 
-
-    # @param input [String] search query
-    # @return [Hash] hash_results JSON
-    def self.execute_search(input:)
+    def execute_search(input:)
       GoogleSearch.new(
         q: input,
         serp_api_key: ENV["SERPAPI_API_KEY"]
data/lib/tool/wikipedia.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "wikipedia"
-
 module Tool
   class Wikipedia < Base
     # Tool that adds the capability to search using the Wikipedia API
@@ -15,10 +13,15 @@ module Tool
       Input should be a search query.
     DESC
 
+    def initialize
+      depends_on "wikipedia-client"
+      require "wikipedia"
+    end
+
     # Executes Wikipedia API search and returns the answer
     # @param input [String] search query
     # @return [String] Answer
-    def
+    def execute(input:)
       page = ::Wikipedia.find(input)
       # It would be nice to figure out a way to provide page.content but the LLM token limit is an issue
       page.summary
data/lib/vectorsearch/base.rb
CHANGED
data/lib/vectorsearch/milvus.rb
CHANGED
@@ -1,24 +1,18 @@
 # frozen_string_literal: true
 
-require "milvus"
-
 module Vectorsearch
   class Milvus < Base
-    def initialize(
-
-
-
-      @client = ::Milvus::Client.new(
-        url: url
-      )
+    def initialize(url:, index_name:, llm:, llm_api_key:, api_key: nil)
+      depends_on "milvus"
+      require "milvus"
+
+      @client = ::Milvus::Client.new(url: url)
       @index_name = index_name
 
       super(llm: llm, llm_api_key: llm_api_key)
     end
 
-    def add_texts(
-      texts:
-    )
+    def add_texts(texts:)
       client.entities.insert(
         collection_name: index_name,
         num_rows: texts.count,
@@ -74,10 +68,7 @@ module Vectorsearch
       )
     end
 
-    def similarity_search(
-      query:,
-      k: 4
-    )
+    def similarity_search(query:, k: 4)
       embedding = generate_embedding(text: query)
 
       similarity_search_by_vector(
@@ -86,10 +77,7 @@ module Vectorsearch
       )
     end
 
-    def similarity_search_by_vector(
-      embedding:,
-      k: 4
-    )
+    def similarity_search_by_vector(embedding:, k: 4)
       client.search(
         collection_name: index_name,
         top_k: k.to_s,
data/lib/vectorsearch/pinecone.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "pinecone"
-
 module Vectorsearch
   class Pinecone < Base
     # Initialize the Pinecone client
@@ -10,13 +8,10 @@ module Vectorsearch
     # @param index_name [String] The name of the index to use
     # @param llm [Symbol] The LLM to use
     # @param llm_api_key [String] The API key for the LLM
-    def initialize(
-
-
-
-      llm:,
-      llm_api_key:
-    )
+    def initialize(environment:, api_key:, index_name:, llm:, llm_api_key:)
+      depends_on "pinecone"
+      require "pinecone"
+
       ::Pinecone.configure do |config|
         config.api_key = api_key
         config.environment = environment
@@ -31,9 +26,7 @@ module Vectorsearch
     # Add a list of texts to the index
     # @param texts [Array] The list of texts to add
     # @return [Hash] The response from the server
-    def add_texts(
-      texts:
-    )
+    def add_texts(texts:)
       vectors = texts.map do |text|
         {
           # TODO: Allows passing in your own IDs
@@ -78,10 +71,7 @@ module Vectorsearch
     # @param embedding [Array] The embedding to search for
     # @param k [Integer] The number of results to return
     # @return [Array] The list of results
-    def similarity_search_by_vector(
-      embedding:,
-      k: 4
-    )
+    def similarity_search_by_vector(embedding:, k: 4)
       index = client.index(index_name)
 
       response = index.query(
data/lib/vectorsearch/qdrant.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "qdrant"
-
 module Vectorsearch
   class Qdrant < Base
     # Initialize the Qdrant client
@@ -10,13 +8,10 @@ module Vectorsearch
     # @param index_name [String] The name of the index to use
     # @param llm [Symbol] The LLM to use
     # @param llm_api_key [String] The API key for the LLM
-    def initialize(
-
-
-
-      llm:,
-      llm_api_key:
-    )
+    def initialize(url:, api_key:, index_name:, llm:, llm_api_key:)
+      depends_on "qdrant-ruby"
+      require "qdrant"
+
       @client = ::Qdrant::Client.new(
         url: url,
         api_key: api_key
@@ -29,9 +24,7 @@ module Vectorsearch
     # Add a list of texts to the index
     # @param texts [Array] The list of texts to add
     # @return [Hash] The response from the server
-    def add_texts(
-      texts:
-    )
+    def add_texts(texts:)
       batch = {ids: [], vectors: [], payloads: []}
 
       texts.each do |text|
data/lib/vectorsearch/weaviate.rb
CHANGED
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require "weaviate"
-
 module Vectorsearch
   class Weaviate < Base
     # Initialize the Weaviate adapter
@@ -10,13 +8,10 @@ module Vectorsearch
     # @param index_name [String] The name of the index to use
     # @param llm [Symbol] The LLM to use
     # @param llm_api_key [String] The API key for the LLM
-    def initialize(
-
-
-
-      llm:,
-      llm_api_key:
-    )
+    def initialize(url:, api_key:, index_name:, llm:, llm_api_key:)
+      depends_on "weaviate-ruby"
+      require "weaviate"
+
       @client = ::Weaviate::Client.new(
         url: url,
         api_key: api_key,
@@ -31,9 +26,7 @@ module Vectorsearch
     # Add a list of texts to the index
     # @param texts [Array] The list of texts to add
     # @return [Hash] The response from the server
-    def add_texts(
-      texts:
-    )
+    def add_texts(texts:)
       objects = texts.map do |text|
         {
           class: index_name,
@@ -69,10 +62,7 @@ module Vectorsearch
     # @param query [String] The query to search for
     # @param k [Integer|String] The number of results to return
     # @return [Hash] The search results
-    def similarity_search(
-      query:,
-      k: 4
-    )
+    def similarity_search(query:, k: 4)
       near_text = "{ concepts: [\"#{query}\"] }"
 
       client.query.get(
@@ -87,10 +77,7 @@ module Vectorsearch
     # @param embedding [Array] The vector to search for
     # @param k [Integer|String] The number of results to return
     # @return [Hash] The search results
-    def similarity_search_by_vector(
-      embedding:,
-      k: 4
-    )
+    def similarity_search_by_vector(embedding:, k: 4)
       near_vector = "{ vector: #{embedding} }"
 
       client.query.get(
@@ -104,9 +91,7 @@ module Vectorsearch
     # Ask a question and return the answer
     # @param question [String] The question to ask
     # @return [Hash] The answer
-    def ask(
-      question:
-    )
+    def ask(question:)
       # Weaviate currently supports the `ask:` parameter only for the OpenAI LLM (with `qna-openai` module enabled).
       # The Cohere support is on the way: https://github.com/weaviate/weaviate/pull/2600
       if llm == :openai
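All four vector search adapters now take flat keyword arguments in `initialize` and lazily require their client gem. A usage sketch for the Weaviate adapter with the new signature (URLs, keys, and the index name are placeholders):

```ruby
require "langchain"

client = Vectorsearch::Weaviate.new(
  url: ENV["WEAVIATE_URL"],
  api_key: ENV["WEAVIATE_API_KEY"],
  index_name: "Documents",
  llm: :openai,
  llm_api_key: ENV["OPENAI_API_KEY"]
)

client.add_texts(texts: ["lorem ipsum dolor sit amet", "consectetur adipiscing elit"])
client.similarity_search(query: "lorem", k: 2)
```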
data/lib/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: langchainrb
 version: !ruby/object:Gem::Version
-  version: 0.3.3
+  version: 0.3.4
 platform: ruby
 authors:
 - Andrei Bondarev
@@ -11,47 +11,47 @@ cert_chain: []
 date: 2023-05-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: dotenv-rails
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 2.7.6
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 2.7.6
 - !ruby/object:Gem::Dependency
-  name:
+  name: pry-byebug
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 3.10.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 3.10.0
 - !ruby/object:Gem::Dependency
-  name:
+  name: cohere-ruby
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 0.9.3
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 0.9.3
 - !ruby/object:Gem::Dependency
   name: eqn
   requirement: !ruby/object:Gem::Requirement
@@ -59,7 +59,7 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 1.6.5
-  type: :
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -67,47 +67,61 @@ dependencies:
       - !ruby/object:Gem::Version
         version: 1.6.5
 - !ruby/object:Gem::Dependency
-  name:
+  name: google_search_results
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
-  type: :
+        version: 2.0.0
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 2.0.0
 - !ruby/object:Gem::Dependency
-  name:
+  name: hugging-face
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
-  type: :
+        version: 0.3.2
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.3.2
 - !ruby/object:Gem::Dependency
-  name:
+  name: milvus
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
-  type: :
+        version: 0.9.0
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 0.9.0
+- !ruby/object:Gem::Dependency
+  name: pinecone
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.1.6
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.1.6
 - !ruby/object:Gem::Dependency
   name: qdrant-ruby
   requirement: !ruby/object:Gem::Requirement
@@ -115,7 +129,7 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.9.0
-  type: :
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -123,19 +137,19 @@ dependencies:
       - !ruby/object:Gem::Version
         version: 0.9.0
 - !ruby/object:Gem::Dependency
-  name:
+  name: ruby-openai
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
-  type: :
+        version: 4.0.0
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 4.0.0
 - !ruby/object:Gem::Dependency
   name: weaviate-ruby
   requirement: !ruby/object:Gem::Requirement
@@ -143,7 +157,7 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.8.0
-  type: :
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -157,7 +171,7 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 1.17.0
-  type: :
+  type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -191,6 +205,7 @@ files:
 - lib/langchain.rb
 - lib/llm/base.rb
 - lib/llm/cohere.rb
+- lib/llm/hugging_face.rb
 - lib/llm/openai.rb
 - lib/logging.rb
 - lib/prompt/base.rb