langchainrb 0.3.2 → 0.3.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e751a38f6a248db9aabfac3f1b9bc547f61304dd366be999005941045a5adcd6
- data.tar.gz: 56db69c0a578bdf198bfc12cb53e92f0e1d15654a1dfa29715d5d7833550da33
+ metadata.gz: 735fb9932ea28c3d6f3938b964e4edd5d030ed4c12c46a9f35293d35469cf952
+ data.tar.gz: 66a8030ea90c19c1950c4136c48677b6490df948dc2693562eca7b8a370dc3ae
  SHA512:
- metadata.gz: bdd3060863a967b48a6123ea379d9a98632730a726b8da9924bb0061511b8bf88fa05f36c49ac4b8f136472e9aa204c4bd09929d31e0f2c0700ad16361a1a8cf
- data.tar.gz: 88f04804d10f51d639b8643bd683b8a420920fc4ad2d26ef6c45f4fac034ec3f8b54787de51c4799f4a074029ad5649efca6e950b29fef62b96ce9452e5ec8cc
+ metadata.gz: 426ae91fc1c2d297b84758f6fe87d62a6dbaf4614776ab7d247c994ce849a979d1752d1241963f7fd3b8d71a68dc6f576660daabcacf19159b63acdda65fa7d9
+ data.tar.gz: 4acd1a02be734cccc431499c0e1659694004f8ee30b27b99d51b580373c4a410e87f8bf96edfeec38e67f1066d34441ccbb4ce6e4b6597623f606876e3fcb16f
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
  ## [Unreleased]
 
+ ## [0.3.3] - 2023-05-16
+ - Dependencies are now optionally loaded and required at runtime
+ - Start using `standardrb` for linting
+ - Use the Ruby logger
+
  ## [0.3.2] - 2023-05-15
  - Agents
  - Fix Chain of Thought prompt loader
data/Gemfile CHANGED
@@ -8,3 +8,5 @@ gemspec
  gem "rake", "~> 13.0"
 
  gem "rspec", "~> 3.0"
+
+ gem "standardrb"
data/Gemfile.lock CHANGED
@@ -1,8 +1,7 @@
  PATH
    remote: .
    specs:
-     langchainrb (0.3.2)
-       cohere-ruby (~> 0.9.3)
+     langchainrb (0.3.3)
        eqn (~> 1.6.5)
        google_search_results (~> 2.0.0)
        milvus (~> 0.9.0)
@@ -35,6 +34,7 @@ GEM
      tzinfo (~> 2.0)
    addressable (2.8.4)
      public_suffix (>= 2.0.2, < 6.0)
+   ast (2.4.2)
    builder (3.2.4)
    byebug (11.1.3)
    coderay (1.1.3)
@@ -128,6 +128,9 @@ GEM
    i18n (1.13.0)
      concurrent-ruby (~> 1.0)
    ice_nine (0.11.2)
+   json (2.6.3)
+   language_server-protocol (3.17.0.3)
+   lint_roller (1.0.0)
    loofah (2.21.1)
      crass (~> 1.0.2)
      nokogiri (>= 1.5.9)
@@ -138,10 +141,15 @@ GEM
    minitest (5.18.0)
    multi_xml (0.6.0)
    multipart-post (2.3.0)
+   nokogiri (1.14.3-arm64-darwin)
+     racc (~> 1.4)
    nokogiri (1.14.3-x86_64-darwin)
      racc (~> 1.4)
    nokogiri (1.14.3-x86_64-linux)
      racc (~> 1.4)
+   parallel (1.23.0)
+   parser (3.2.2.1)
+     ast (~> 2.4.1)
    pinecone (0.1.71)
      dry-struct (~> 1.6.0)
      dry-validation (~> 1.10.0)
@@ -173,7 +181,10 @@ GEM
      rake (>= 12.2)
      thor (~> 1.0)
      zeitwerk (~> 2.5)
+   rainbow (3.1.1)
    rake (13.0.6)
+   regexp_parser (2.8.0)
+   rexml (3.2.5)
    rspec (3.12.0)
      rspec-core (~> 3.12.0)
      rspec-expectations (~> 3.12.0)
@@ -187,15 +198,45 @@ GEM
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.12.0)
    rspec-support (3.12.0)
+   rubocop (1.50.2)
+     json (~> 2.3)
+     parallel (~> 1.10)
+     parser (>= 3.2.0.0)
+     rainbow (>= 2.2.2, < 4.0)
+     regexp_parser (>= 1.8, < 3.0)
+     rexml (>= 3.2.5, < 4.0)
+     rubocop-ast (>= 1.28.0, < 2.0)
+     ruby-progressbar (~> 1.7)
+     unicode-display_width (>= 2.4.0, < 3.0)
+   rubocop-ast (1.28.1)
+     parser (>= 3.2.1.0)
+   rubocop-performance (1.16.0)
+     rubocop (>= 1.7.0, < 2.0)
+     rubocop-ast (>= 0.4.0)
    ruby-openai (4.0.0)
      faraday (>= 1)
      faraday-multipart (>= 1)
+   ruby-progressbar (1.13.0)
    ruby2_keywords (0.0.5)
+   standard (1.28.2)
+     language_server-protocol (~> 3.17.0.2)
+     lint_roller (~> 1.0)
+     rubocop (~> 1.50.2)
+     standard-custom (~> 1.0.0)
+     standard-performance (~> 1.0.1)
+   standard-custom (1.0.0)
+     lint_roller (~> 1.0)
+   standard-performance (1.0.1)
+     lint_roller (~> 1.0)
+     rubocop-performance (~> 1.16.0)
+   standardrb (1.0.1)
+     standard
    thor (1.2.1)
    treetop (1.6.12)
      polyglot (~> 0.3)
    tzinfo (2.0.6)
      concurrent-ruby (~> 1.0)
+   unicode-display_width (2.4.2)
    weaviate-ruby (0.8.1)
      faraday (~> 1)
      faraday_middleware (~> 1)
@@ -205,15 +246,18 @@ GEM
    zeitwerk (2.6.8)
 
  PLATFORMS
+   arm64-darwin-22
    x86_64-darwin-19
    x86_64-linux
 
  DEPENDENCIES
+   cohere-ruby (~> 0.9.3)
    dotenv-rails (~> 2.7.6)
    langchainrb!
    pry-byebug (~> 3.10.0)
    rake (~> 13.0)
    rspec (~> 3.0)
+   standardrb
 
  BUNDLED WITH
    2.4.0
data/README.md CHANGED
@@ -104,6 +104,8 @@ openai.complete(prompt: "What is the meaning of life?")
  ```
 
  #### Cohere
+ Add `gem "cohere-ruby", "~> 0.9.3"` to your Gemfile.
+
  ```ruby
  cohere = LLM::Cohere.new(api_key: ENV["COHERE_API_KEY"])
  ```
@@ -211,7 +213,7 @@ agent.tools
  # => ["search", "calculator"]
  ```
  ```ruby
- agent.run(question: "How many full soccer fields would be needed to cover the distance between NYC and DC in a straight line?", logging: true)
+ agent.run(question: "How many full soccer fields would be needed to cover the distance between NYC and DC in a straight line?")
  #=> "Approximately 2,945 soccer fields would be needed to cover the distance between NYC and DC in a straight line."
  ```
 
@@ -228,6 +230,16 @@ agent.run(question: "How many full soccer fields would be needed to cover the di
  | "search" | A wrapper around Google Search | `ENV["SERPAPI_API_KEY"]` (https://serpapi.com/manage-api-key)
  | "wikipedia" | Calls Wikipedia API to retrieve the summary | |
 
+
+ ## Logging
+
+ LangChain.rb uses standard logging mechanisms and defaults to the `:warn` level. Most messages are at the `info` level, but we will add `debug` or `warn` statements as needed.
+ To show all log messages:
+
+ ```ruby
+ Langchain.logger.level = :info
+ ```
+
  ## Development
 
  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
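
Taken together, the README changes above mean agent logging is no longer opt-in per call: `agent.run` drops the `logging:` keyword and progress is reported through `Langchain.logger`, which defaults to `:warn` (see `lib/logging.rb` below). A minimal sketch of surfacing those messages; the `Agent::ChainOfThoughtAgent` constructor arguments are assumed from the attributes in `lib/agent/base.rb` and may differ:

```ruby
require "langchain"

# Default level is :warn, so the agent's info-level progress messages
# ("Agent: Passing the prompt to ...") stay hidden unless you lower it.
Langchain.logger.level = :info

# Assumed constructor: llm, llm_api_key, and tools mirror the Agent attributes.
agent = Agent::ChainOfThoughtAgent.new(
  llm: :openai,
  llm_api_key: ENV["OPENAI_API_KEY"],
  tools: ["search", "calculator"]
)

agent.run(question: "How many full soccer fields would be needed to cover the distance between NYC and DC in a straight line?")
```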
data/Rakefile CHANGED
@@ -2,7 +2,12 @@
 
  require "bundler/gem_tasks"
  require "rspec/core/rake_task"
+ require "standard/rake"
 
  RSpec::Core::RakeTask.new(:spec)
 
  task default: :spec
+
+ Rake::Task["spec"].enhance do
+   Rake::Task["standard:fix"].invoke
+ end
@@ -9,10 +9,10 @@ prompt = Prompt::FewShotPromptTemplate.new(
      template: "Input: {input}\nOutput: {output}"
    ),
    examples: [
-     { "input": "happy", "output": "sad" },
-     { "input": "tall", "output": "short" }
+     {input: "happy", output: "sad"},
+     {input: "tall", output: "short"}
    ],
-   input_variables: ["adjective"]
+   input_variables: ["adjective"]
  )
 
  prompt.format(adjective: "good")
@@ -35,9 +35,9 @@ pinecone.ask(
  )
 
  # Generate an embedding and search by it
- openai = LLM::OpenAI.new(api_key: ENV['OPENAI_API_KEY'])
+ openai = LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
  embedding = openai.embed(text: "veggie")
 
  pinecone.similarity_search_by_vector(
    embedding: embedding
- )
+ )
@@ -9,7 +9,6 @@ qdrant = Vectorsearch::Qdrant.new(
    llm_api_key: ENV["COHERE_API_KEY"]
  )
 
-
  # Create the default schema.
  qdrant.create_default_schema
 
@@ -33,4 +32,4 @@ qdrant.similarity_search(
  # Interact with your index through Q&A
  qdrant.ask(
    question: "What is the best recipe for chicken?"
- )
+ )
@@ -27,4 +27,4 @@ weaviate.add_texts(
  weaviate.similarity_search(
    query: "chicken",
    k: 1
- )
+ )
@@ -5,7 +5,7 @@ module Agent
    attr_reader :llm, :llm_api_key, :llm_client, :tools
 
    # Initializes the Agent
-   #
+   #
    # @param llm [Symbol] The LLM to use
    # @param llm_api_key [String] The API key for the LLM
    # @param tools [Array] The tools to use
@@ -22,7 +22,7 @@ module Agent
    end
 
    # Validate tools when they're re-assigned
-   #
+   #
    # @param value [Array] The tools to use
    # @return [Array] The tools that will be used
    def tools=(value)
@@ -31,11 +31,10 @@ module Agent
    end
 
    # Run the Agent!
-   #
+   #
    # @param question [String] The question to ask
-   # @param logging [Boolean] Whether or not to log the Agent's actions
    # @return [String] The answer to the question
-   def run(question:, logging: false)
+   def run(question:)
      question = question.strip
      prompt = create_prompt(
        question: question,
@@ -43,24 +42,25 @@ module Agent
      )
 
      loop do
-       puts("Agent: Passing the prompt to the #{llm} LLM") if logging
+       Langchain.logger.info("Agent: Passing the prompt to the #{llm} LLM")
        response = llm_client.generate_completion(
          prompt: prompt,
          stop_sequences: ["Observation:"],
          max_tokens: 500
        )
 
+       binding.pry
        # Append the response to the prompt
-       prompt += response;
-
+       prompt += response
+
        # Find the requested action in the "Action: search" format
        action = response.match(/Action: (.*)/)&.send(:[], -1)
-
+
        if action
          # Find the input to the action in the "Action Input: [action_input]" format
          action_input = response.match(/Action Input: "?(.*)"?/)&.send(:[], -1)
 
-         puts("Agent: Using the \"#{action}\" Tool with \"#{action_input}\"") if logging
+         Langchain.logger.info("Agent: Using the \"#{action}\" Tool with \"#{action_input}\"")
 
          # Retrieve the Tool::[ToolName] class and call `execute`` with action_input as the input
          result = Tool
@@ -68,10 +68,10 @@ module Agent
            .execute(input: action_input)
 
          # Append the Observation to the prompt
-         if prompt.end_with?("Observation:")
-           prompt += " #{result}\nThought:"
+         prompt += if prompt.end_with?("Observation:")
+           " #{result}\nThought:"
          else
-           prompt += "\nObservation: #{result}\nThought:"
+           "\nObservation: #{result}\nThought:"
          end
        else
          # Return the final answer
@@ -92,7 +92,7 @@ module Agent
        question: question,
        tool_names: "[#{tools.join(", ")}]",
        tools: tools.map do |tool|
-         "#{tool}: #{Tool.const_get(Tool::Base::TOOLS[tool]).const_get("DESCRIPTION")}"
+         "#{tool}: #{Tool.const_get(Tool::Base::TOOLS[tool]).const_get(:DESCRIPTION)}"
        end.join("\n")
      )
    end
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ def depends_on(gem_name)
+   gem(gem_name) # require the gem
+
+   return(true) unless defined?(Bundler) # If we're in a non-bundler environment, we're no longer able to determine if we'll meet requirements
+
+   gem_version = Gem.loaded_specs[gem_name].version
+   gem_requirement = Bundler.load.dependencies.find { |g| g.name == gem_name }.requirement
+
+   if !gem_requirement.satisfied_by?(gem_version)
+     raise "The #{gem_name} gem is installed, but version #{gem_requirement} is required. You have #{gem_version}."
+   end
+
+   true
+ rescue LoadError
+   raise LoadError, "Could not load #{gem_name}. Please ensure that the #{gem_name} gem is installed."
+ end
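
This new top-level `depends_on` helper (listed as `lib/dependency_helper.rb` in the gem metadata below) is what makes provider gems optional: a class calls it, plus a plain `require`, from its initializer so the gem is only loaded when that class is actually used. A minimal sketch of the intended pattern; `LLM::Example` and the `example-ruby` gem are hypothetical, but the shape mirrors the `LLM::Cohere` change later in this diff:

```ruby
module LLM
  # Hypothetical provider class illustrating the optional-dependency pattern.
  class Example < Base
    def initialize(api_key:)
      depends_on "example-ruby" # raises LoadError if the gem is missing
      require "example"         # only loaded when LLM::Example is instantiated

      @client = ::Example::Client.new(api_key: api_key)
    end
  end
end
```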
data/lib/langchain.rb CHANGED
@@ -1,6 +1,8 @@
  # frozen_string_literal: true
 
  require_relative "./version"
+ require_relative "./dependency_helper"
+ require_relative "./logging"
 
  module Agent
    autoload :Base, "agent/base"
@@ -32,6 +34,7 @@ end
  module Tool
    autoload :Base, "tool/base"
    autoload :Calculator, "tool/calculator"
+   autoload :News, "tool/news"
    autoload :SerpApi, "tool/serp_api"
    autoload :Wikipedia, "tool/wikipedia"
  end
data/lib/llm/base.rb CHANGED
@@ -12,16 +12,16 @@ module LLM
    }.freeze
 
    def default_dimension
-     self.class.const_get("DEFAULTS").dig(:dimension)
+     self.class.const_get(:DEFAULTS).dig(:dimension)
    end
 
    # Ensure that the LLM value passed in is supported
    # @param llm [Symbol] The LLM to use
    def self.validate_llm!(llm:)
      # TODO: Fix so this works when `llm` value is a string instead of a symbol
-     unless LLM::Base::LLMS.keys.include?(llm)
+     unless LLM::Base::LLMS.key?(llm)
        raise ArgumentError, "LLM must be one of #{LLM::Base::LLMS.keys}"
      end
    end
  end
- end
+ end
data/lib/llm/cohere.rb CHANGED
@@ -1,10 +1,7 @@
  # frozen_string_literal: true
 
- require "cohere"
-
  module LLM
    class Cohere < Base
-
      DEFAULTS = {
        temperature: 0.0,
        completion_model_name: "base",
@@ -13,6 +10,9 @@ module LLM
      }.freeze
 
      def initialize(api_key:)
+       depends_on "cohere-ruby"
+       require "cohere"
+
        @client = ::Cohere::Client.new(api_key: api_key)
      end
 
@@ -22,7 +22,7 @@ module LLM
      def embed(text:)
        response = client.embed(
          texts: [text],
-         model: DEFAULTS[:embeddings_model_name],
+         model: DEFAULTS[:embeddings_model_name]
        )
        response.dig("embeddings").first
      end
@@ -50,4 +50,4 @@ module LLM
      alias_method :generate_completion, :complete
      alias_method :generate_embedding, :embed
    end
- end
+ end
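
The practical effect of moving the `require` into the initializer: `cohere-ruby` is no longer a hard runtime dependency of the gem, only of `LLM::Cohere`. A rough sketch of both paths, assuming the `complete`/`embed` interface aliased above is otherwise unchanged:

```ruby
# With `gem "cohere-ruby", "~> 0.9.3"` in the application's Gemfile,
# behaviour is the same as before:
cohere = LLM::Cohere.new(api_key: ENV["COHERE_API_KEY"])
cohere.complete(prompt: "What is the meaning of life?")
cohere.embed(text: "veggie")

# Without the gem installed, instantiation now fails with a targeted error
# from dependency_helper instead of the whole library failing to load:
#   LoadError: Could not load cohere-ruby. Please ensure that the cohere-ruby gem is installed.
```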
data/lib/llm/openai.rb CHANGED
@@ -4,7 +4,6 @@ require "openai"
 
  module LLM
    class OpenAI < Base
-
      DEFAULTS = {
        temperature: 0.0,
        completion_model_name: "text-davinci-003",
@@ -53,4 +52,4 @@ module LLM
      alias_method :generate_completion, :complete
      alias_method :generate_embedding, :embed
    end
- end
+ end
data/lib/logging.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ require "logger"
+
+ module Langchain
+   def self.logger
+     @@logger ||= Logger.new(STDOUT, level: :warn, formatter: -> (severity, datetime, progname, msg) { "[LangChain.rb] #{msg}\n" })
+   end
+
+   def self.logger=(instance)
+     @@logger = instance
+   end
+ end
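
Since the module exposes a writer alongside the reader, applications are not tied to the default STDOUT logger above. A small sketch of swapping in your own logger; the file path is just an example:

```ruby
require "logger"
require "langchain"

# Send LangChain.rb output to a file (or any Logger-compatible object)
# instead of the default STDOUT logger.
Langchain.logger = Logger.new("log/langchain.log", level: :info)
```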
data/lib/prompt/base.rb CHANGED
@@ -1,6 +1,6 @@
  # frozen_string_literal: true
 
- require 'strscan'
+ require "strscan"
 
  module Prompt
    class Base
@@ -52,14 +52,12 @@ module Prompt
      FileUtils.mkdir_p(directory_path) unless directory_path.directory?
 
      if save_path.extname == ".json"
-       File.open(file_path, "w") { |f| f.write(to_h.to_json) }
+       File.write(file_path, to_h.to_json)
      else
        raise ArgumentError, "#{file_path} must be json"
      end
    end
 
-   private
-
    #
    # Extracts variables from a template string.
    #
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
- require 'strscan'
- require 'pathname'
+ require "strscan"
+ require "pathname"
 
  module Prompt
    TYPE_TO_LOADER = {
@@ -70,7 +70,7 @@ module Prompt
    def load_from_config(config)
      # If `_type` key is not present in the configuration hash, add it with a default value of `prompt`
      unless config.key?("_type")
-       puts "[WARN] No `_type` key found, defaulting to `prompt`"
+       Langchain.logger.warn "No `_type` key found, defaulting to `prompt`"
        config["_type"] = "prompt"
      end
 
data/lib/tool/base.rb CHANGED
@@ -12,9 +12,14 @@ module Tool
    TOOLS = {
      "calculator" => "Tool::Calculator",
      "search" => "Tool::SerpApi",
-     "wikipedia" => "Tool::Wikipedia"
+     "wikipedia" => "Tool::Wikipedia",
+     "news" => "Tool::News"
    }
 
+   def self.description(value)
+     const_set(:DESCRIPTION, value.tr("\n", " ").strip)
+   end
+
    # Executes the tool and returns the answer
    # @param input [String] input to the tool
    # @return [String] answer
@@ -22,17 +27,17 @@ module Tool
      raise NotImplementedError, "Your tool must implement the `self.execute(input:)` method that returns a string"
    end
 
-   #
+   #
    # Validates the list of strings (tools) are all supported or raises an error
    # @param tools [Array<String>] list of tools to be used
-   #
+   #
    # @raise [ArgumentError] If any of the tools are not supported
-   #
+   #
    def self.validate_tools!(tools:)
-     unrecognized_tools = tools - Tool::Base::TOOLS.keys
+     unrecognized_tools = tools - Tool::Base::TOOLS.keys
 
      if unrecognized_tools.any?
-       raise ArgumentError, "Unrecognized Tools: #{unrecognized_tools}"
+       raise ArgumentError, "Unrecognized Tools: #{unrecognized_tools}"
      end
    end
  end
@@ -4,8 +4,11 @@ require "eqn"
 
  module Tool
    class Calculator < Base
-     DESCRIPTION = "Useful for getting the result of a math expression. " +
-       "The input to this tool should be a valid mathematical expression that could be executed by a simple calculator."
+     description <<~DESC
+       Useful for getting the result of a math expression.
+
+       The input to this tool should be a valid mathematical expression that could be executed by a simple calculator.
+     DESC
 
      # Evaluates a pure math expression or if equation contains non-math characters (e.g.: "12F in Celsius") then
      # it uses the google search calculator to evaluate the expression
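
The new `description` class macro simply normalizes a heredoc into the `DESCRIPTION` constant that the agent interpolates into its prompt (via `const_get(:DESCRIPTION)` above). A sketch of what a tool written against this API looks like; `Tool::Weather` is hypothetical and would also need an entry in `Tool::Base::TOOLS` to pass `validate_tools!`:

```ruby
module Tool
  # Hypothetical tool showing the description DSL and the required class method.
  class Weather < Base
    description <<~DESC
      A wrapper around an imaginary weather API.

      Input should be a city name.
    DESC

    # The agent calls Tool.const_get(...).execute(input: ...), so every tool
    # implements self.execute(input:) and returns a String.
    def self.execute(input:)
      "It is currently 20°C in #{input}."
    end
  end
end
```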
data/lib/tool/serp_api.rb CHANGED
@@ -7,10 +7,14 @@ module Tool
    # Wrapper around SerpAPI
    # Set ENV["SERPAPI_API_KEY"] to use it
 
-   DESCRIPTION = "A wrapper around Google Search. " +
-     "Useful for when you need to answer questions about current events. " +
-     "Always one of the first options when you need to find information on internet. " +
-     "Input should be a search query."
+   description <<~DESC
+     A wrapper around Google Search.
+
+     Useful for when you need to answer questions about current events.
+     Always one of the first options when you need to find information on internet.
+
+     Input should be a search query.
+   DESC
 
    # Executes Google Search and returns hash_results JSON
    # @param input [String] search query
@@ -18,7 +22,7 @@ module Tool
    # TODO: Glance at all of the fields that langchain Python looks through: https://github.com/hwchase17/langchain/blob/v0.0.166/langchain/utilities/serpapi.py#L128-L156
    # We may need to do the same thing here.
    def self.execute(input:)
-     hash_results = self.execute_search(input: input)
+     hash_results = execute_search(input: input)
 
      hash_results.dig(:answer_box, :answer) ||
        hash_results.dig(:answer_box, :snippet) ||
@@ -33,7 +37,7 @@ module Tool
        q: input,
        serp_api_key: ENV["SERPAPI_API_KEY"]
      )
-       .get_hash
+       .get_hash
    end
  end
  end
@@ -1,15 +1,19 @@
  # frozen_string_literal: true
 
- require 'wikipedia'
+ require "wikipedia"
 
  module Tool
    class Wikipedia < Base
      # Tool that adds the capability to search using the Wikipedia API
 
-     DESCRIPTION = "A wrapper around Wikipedia. " +
-       "Useful for when you need to answer general questions about " +
-       "people, places, companies, facts, historical events, or other subjects. " +
-       "Input should be a search query."
+     description <<~DESC
+       A wrapper around Wikipedia.
+
+       Useful for when you need to answer general questions about
+       people, places, companies, facts, historical events, or other subjects.
+
+       Input should be a search query.
+     DESC
 
      # Executes Wikipedia API search and returns the answer
      # @param input [String] search query
@@ -17,7 +21,7 @@ module Tool
    def self.execute(input:)
      page = ::Wikipedia.find(input)
      # It would be nice to figure out a way to provide page.content but the LLM token limit is an issue
-     page.summary
+     page.summary
    end
  end
@@ -6,7 +6,7 @@ module Vectorsearch
 
    attr_reader :client, :index_name, :llm, :llm_api_key, :llm_client
 
-   DEFAULT_METRIC = "cosine".freeze
+   DEFAULT_METRIC = "cosine"
 
    # @param llm [Symbol] The LLM to use
    # @param llm_api_key [String] The API key for the LLM
@@ -46,7 +46,7 @@ module Vectorsearch
        input_variables: ["context"]
      ),
      examples: [
-       { context: context }
+       {context: context}
      ],
      input_variables: ["question"],
      example_separator: "\n"
@@ -55,4 +55,4 @@ module Vectorsearch
      prompt_template.format(question: question)
    end
  end
- end
+ end
@@ -6,10 +6,7 @@ module Vectorsearch
    class Milvus < Base
      def initialize(
        url:,
-       api_key: nil,
-       index_name:,
-       llm:,
-       llm_api_key:
+       index_name:, llm:, llm_api_key:, api_key: nil
      )
        @client = ::Milvus::Client.new(
          url: url
@@ -96,7 +93,7 @@ module Vectorsearch
      client.search(
        collection_name: index_name,
        top_k: k.to_s,
-       vectors: [ embedding ],
+       vectors: [embedding],
        dsl_type: 1,
        params: "{\"nprobe\": 10}",
        anns_field: "content",
@@ -18,10 +18,10 @@ module Vectorsearch
      llm_api_key:
    )
      ::Pinecone.configure do |config|
-       config.api_key = api_key
+       config.api_key = api_key
        config.environment = environment
      end
-
+
      @client = ::Pinecone::Client.new
      @index_name = index_name
 
@@ -38,7 +38,7 @@ module Vectorsearch
      {
        # TODO: Allows passing in your own IDs
        id: SecureRandom.uuid,
-       metadata: { content: text },
+       metadata: {content: text},
        values: generate_embedding(text: text)
      }
    end
@@ -109,4 +109,4 @@ module Vectorsearch
      generate_completion(prompt: prompt)
    end
  end
- end
+ end
@@ -32,12 +32,12 @@ module Vectorsearch
    def add_texts(
      texts:
    )
-     batch = { ids: [], vectors: [], payloads: [] }
+     batch = {ids: [], vectors: [], payloads: []}
 
      texts.each do |text|
        batch[:ids].push(SecureRandom.uuid)
        batch[:vectors].push(generate_embedding(text: text))
-       batch[:payloads].push({ content: text })
+       batch[:payloads].push({content: text})
      end
 
      client.points.upsert(
@@ -106,4 +106,4 @@ module Vectorsearch
      generate_completion(prompt: prompt)
    end
  end
- end
+ end
@@ -37,7 +37,7 @@ module Vectorsearch
    objects = texts.map do |text|
      {
        class: index_name,
-       properties: { content: text }
+       properties: {content: text}
      }
    end
 
@@ -50,7 +50,7 @@ module Vectorsearch
    def create_default_schema
      client.schema.create(
        class_name: index_name,
-       vectorizer: "text2vec-#{llm.to_s}",
+       vectorizer: "text2vec-#{llm}",
        # TODO: Figure out a way to optionally enable it
        # "module_config": {
        #   "qna-openai": {}
@@ -132,4 +132,4 @@ module Vectorsearch
    end
  end
  end
- end
+ end
data/lib/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Langchain
-   VERSION = "0.3.2"
+   VERSION = "0.3.3"
  end
metadata CHANGED
@@ -1,29 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: langchainrb
  version: !ruby/object:Gem::Version
-   version: 0.3.2
+   version: 0.3.3
  platform: ruby
  authors:
  - Andrei Bondarev
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-05-15 00:00:00.000000000 Z
+ date: 2023-05-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: pry-byebug
+   name: cohere-ruby
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 3.10.0
+         version: 0.9.3
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 3.10.0
+         version: 0.9.3
  - !ruby/object:Gem::Dependency
    name: dotenv-rails
    requirement: !ruby/object:Gem::Requirement
@@ -39,19 +39,19 @@ dependencies:
      - !ruby/object:Gem::Version
        version: 2.7.6
  - !ruby/object:Gem::Dependency
-   name: cohere-ruby
+   name: pry-byebug
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.9.3
-   type: :runtime
+         version: 3.10.0
+   type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.9.3
+         version: 3.10.0
  - !ruby/object:Gem::Dependency
    name: eqn
    requirement: !ruby/object:Gem::Requirement
@@ -187,10 +187,12 @@ files:
  - lib/agent/base.rb
  - lib/agent/chain_of_thought_agent/chain_of_thought_agent.rb
  - lib/agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json
+ - lib/dependency_helper.rb
  - lib/langchain.rb
  - lib/llm/base.rb
  - lib/llm/cohere.rb
  - lib/llm/openai.rb
+ - lib/logging.rb
  - lib/prompt/base.rb
  - lib/prompt/few_shot_prompt_template.rb
  - lib/prompt/loading.rb