langchainrb 0.2.0 → 0.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c43d156af4f33487e653c0f72d0a27a327ebe07b1ecc7558409b8d877854318c
- data.tar.gz: dba1509aa4494e1a35a08a40ca872bcb9f0b3ad03b7f17c6e8c3716c7d5ce805
+ metadata.gz: a179004be8e547124fe81922b2cf7190d34f94f7775c28df7986d17644dffa31
+ data.tar.gz: 0eaf80695379a5038175d561dfa348d4146073b008aeb662b3b523e8bd04d54d
  SHA512:
- metadata.gz: b800cf71cb8c9193082383d9994f1f40f61d7479e1db57ad970a089dd26e749be684f8f17b388a704aaba595909acb5babe9fa4ab37836953f0d11dbc55efa66
- data.tar.gz: 9fdb6337593f3fa5902af104255a3e9f9a06214965b8dcb026d3b914094804b1d546c7b46c0e923d8327a216639ee4eb64e0febeafa9616c73d3706d74032037
+ metadata.gz: 152fd7a35c9df7e9541b50059a27f5e76d67263ceb4e25dab07224bd80bffe70936ab712ab3345a401b4be5016a4165c8a7a6cf1b4b1673c36275d926d7ce17d
+ data.tar.gz: 848984972f6ab3f1dd4078d303872be1e70ae78a1ceedf2f0b7663f2144d12831c690e1ad48cbd579e779fbe6da8ad6339ee42130716bf4bb137917f5edf3e30
data/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  ## [Unreleased]

+ ## [0.3.1] - 2023-05-12
+ - Tools
+ - Introducing `Tool::Wikipedia`, a tool that looks up Wikipedia entries
+
+ ## [0.3.0] - 2023-05-12
+
+ - Agents
+ - Introducing `Agent::ChainOfThoughtAgent`, a semi-autonomous bot that uses Tools to retrieve additional information in order to make best-effort, informed replies to users' questions.
+ - Tools
+ - Introducing the `Tool::Calculator` tool, which solves mathematical expressions.
+ - Introducing the `Tool::Search` tool, which executes Google searches.
+
  ## [0.2.0] - 2023-05-09

  - Prompt Templating
data/Gemfile.lock CHANGED
@@ -1,13 +1,16 @@
  PATH
  remote: .
  specs:
- langchainrb (0.2.0)
- cohere-ruby (~> 0.9.1)
+ langchainrb (0.3.1)
+ cohere-ruby (~> 0.9.3)
+ eqn (~> 1.6.5)
+ google_search_results (~> 2.0.0)
  milvus (~> 0.9.0)
  pinecone (~> 0.1.6)
  qdrant-ruby (~> 0.9.0)
  ruby-openai (~> 4.0.0)
  weaviate-ruby (~> 0.8.0)
+ wikipedia-client (~> 1.17.0)

  GEM
  remote: https://rubygems.org/
@@ -30,11 +33,14 @@ GEM
  i18n (>= 1.6, < 2)
  minitest (>= 5.1)
  tzinfo (~> 2.0)
+ addressable (2.8.4)
+ public_suffix (>= 2.0.2, < 6.0)
  builder (3.2.4)
  byebug (11.1.3)
  coderay (1.1.3)
- cohere-ruby (0.9.1)
- faraday (~> 2.7.0)
+ cohere-ruby (0.9.3)
+ faraday (~> 1)
+ faraday_middleware (~> 1)
  concurrent-ruby (1.2.2)
  crass (1.0.6)
  diff-lcs (1.5.0)
@@ -79,15 +85,38 @@ GEM
  dry-initializer (~> 3.0)
  dry-schema (>= 1.12, < 2)
  zeitwerk (~> 2.6)
+ eqn (1.6.5)
+ treetop (>= 1.2.0)
  erubi (1.12.0)
- faraday (2.7.4)
- faraday-net_http (>= 2.0, < 3.1)
+ faraday (1.10.3)
+ faraday-em_http (~> 1.0)
+ faraday-em_synchrony (~> 1.0)
+ faraday-excon (~> 1.1)
+ faraday-httpclient (~> 1.0)
+ faraday-multipart (~> 1.0)
+ faraday-net_http (~> 1.0)
+ faraday-net_http_persistent (~> 1.0)
+ faraday-patron (~> 1.0)
+ faraday-rack (~> 1.0)
+ faraday-retry (~> 1.0)
  ruby2_keywords (>= 0.0.4)
+ faraday-em_http (1.0.0)
+ faraday-em_synchrony (1.0.0)
+ faraday-excon (1.1.0)
+ faraday-httpclient (1.0.1)
  faraday-multipart (1.0.4)
  multipart-post (~> 2)
- faraday-net_http (3.0.2)
- graphlient (0.7.0)
- faraday (~> 2.0)
+ faraday-net_http (1.0.1)
+ faraday-net_http_persistent (1.2.0)
+ faraday-patron (1.0.0)
+ faraday-rack (1.0.0)
+ faraday-retry (1.0.3)
+ faraday_middleware (1.2.0)
+ faraday (~> 1.0)
+ google_search_results (2.0.1)
+ graphlient (0.6.0)
+ faraday (>= 1.0)
+ faraday_middleware
  graphql-client
  graphql (2.0.21)
  graphql-client (0.18.0)
@@ -99,12 +128,12 @@ GEM
  i18n (1.13.0)
  concurrent-ruby (~> 1.0)
  ice_nine (0.11.2)
- loofah (2.20.0)
+ loofah (2.21.1)
  crass (~> 1.0.2)
  nokogiri (>= 1.5.9)
  method_source (1.0.0)
- milvus (0.9.0)
- faraday (~> 2.7.0)
+ milvus (0.9.1)
+ faraday (~> 1)
  mini_mime (1.1.2)
  minitest (5.18.0)
  multi_xml (0.6.0)
@@ -117,14 +146,17 @@ GEM
  dry-struct (~> 1.6.0)
  dry-validation (~> 1.10.0)
  httparty (~> 0.21.0)
+ polyglot (0.3.5)
  pry (0.14.2)
  coderay (~> 1.1)
  method_source (~> 1.0)
  pry-byebug (3.10.1)
  byebug (~> 11.0)
  pry (>= 0.13, < 0.15)
- qdrant-ruby (0.9.0)
- faraday (~> 2.7)
+ public_suffix (5.0.1)
+ qdrant-ruby (0.9.2)
+ faraday (~> 1)
+ faraday_middleware (~> 1)
  racc (1.6.2)
  rack (2.2.7)
  rack-test (2.1.0)
@@ -160,11 +192,16 @@ GEM
  faraday-multipart (>= 1)
  ruby2_keywords (0.0.5)
  thor (1.2.1)
+ treetop (1.6.12)
+ polyglot (~> 0.3)
  tzinfo (2.0.6)
  concurrent-ruby (~> 1.0)
- weaviate-ruby (0.8.0)
- faraday (~> 2.7)
- graphlient (~> 0.7.0)
+ weaviate-ruby (0.8.1)
+ faraday (~> 1)
+ faraday_middleware (~> 1)
+ graphlient (~> 0.6.0)
+ wikipedia-client (1.17.0)
+ addressable (~> 2.7)
  zeitwerk (2.6.8)

  PLATFORMS
data/README.md CHANGED
@@ -26,7 +26,7 @@ If bundler is not being used to manage dependencies, install the gem by executing:
  require "langchain"
  ```

- List of currently supported vector search databases and features:
+ #### Supported vector search databases and features:

  | Database | Querying | Storage | Schema Management | Backups | Rails Integration | ??? |
  | -------- |:------------------:| -------:| -----------------:| -------:| -----------------:| ---:|
@@ -35,7 +35,7 @@ List of currently supported vector search databases and features:
  | Milvus | :white_check_mark: | WIP | WIP | WIP | | |
  | Pinecone | :white_check_mark: | WIP | WIP | WIP | | |

- ### Using Vector Search Databases
+ ### Using Vector Search Databases 🔍

  Choose the LLM provider you'll be using (OpenAI or Cohere) and retrieve the API key.

@@ -90,7 +90,7 @@ client.ask(
  )
  ```

- ### Using Standalone LLMs
+ ### Using Standalone LLMs 🗣️

  #### OpenAI
  ```ruby
@@ -114,7 +114,7 @@ cohere.embed(text: "foo bar")
  cohere.complete(prompt: "What is the meaning of life?")
  ```

- ### Using Prompts
+ ### Using Prompts 📋

  #### Prompt Templates

@@ -199,16 +199,50 @@ prompt = Prompt.load_from_path(file_path: "spec/fixtures/prompt/few_shot_prompt_
  prompt.prefix # "Write antonyms for the following words."
  ```

+ ### Using Agents 🤖
+ Agents are semi-autonomous bots that can respond to user questions and use the Tools available to them to provide informed replies. They break problems down into a series of steps and define Actions (and Action Inputs) along the way that are executed and fed back to them as additional information. Once an Agent decides that it has the Final Answer, it responds with it.
+
+ #### Chain-of-Thought Agent
+
+ ```ruby
+ agent = Agent::ChainOfThoughtAgent.new(llm: :openai, llm_api_key: ENV["OPENAI_API_KEY"], tools: ['search', 'calculator'])
+
+ agent.tools
+ # => ["search", "calculator"]
+ ```
+ ```ruby
+ agent.run(question: "How many full soccer fields would be needed to cover the distance between NYC and DC in a straight line?", logging: true)
+ #=> "Approximately 2,945 soccer fields would be needed to cover the distance between NYC and DC in a straight line."
+ ```
+
+ #### Demo
+ ![May-12-2023 13-09-13](https://github.com/andreibondarev/langchainrb/assets/541665/6bad4cd9-976c-420f-9cf9-b85bf84f7eaf)
+
+ ![May-12-2023 13-07-45](https://github.com/andreibondarev/langchainrb/assets/541665/9aacdcc7-4225-4ea0-ab96-7ee48826eb9b)
+
+ #### Available Tools 🛠️
+
+ | Name | Description | Requirements |
+ | -------- | :------------------: | :------------------: |
+ | "calculator" | Useful for getting the result of a math expression | |
+ | "search" | A wrapper around Google Search | `ENV["SERPAPI_API_KEY"]` (https://serpapi.com/manage-api-key) |
+ | "wikipedia" | Calls the Wikipedia API to retrieve the summary | |
+
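The three tools above map to the `Tool::Calculator`, `Tool::SerpApi`, and `Tool::Wikipedia` classes introduced further down in this diff; each exposes a class-level `execute(input:)` method. A minimal sketch of invoking them directly (the inputs are illustrative, and the search call assumes `ENV["SERPAPI_API_KEY"]` is set):

```ruby
require "langchain"

# Pure math expressions are evaluated with the eqn gem.
Tool::Calculator.execute(input: "2 ** 10")

# Wikipedia lookups return the page summary.
Tool::Wikipedia.execute(input: "Ruby (programming language)")

# Google Search via SerpAPI.
Tool::SerpApi.execute(input: "Who won the 2022 FIFA World Cup?")
```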
  ## Development

  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

  To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).

+ ## Core Contributors
+ [<img style="border-radius:50%" alt="Andrei Bondarev" src="https://avatars.githubusercontent.com/u/541665?v=4" width="80" height="80" class="avatar">](https://github.com/andreibondarev)
+
  ## Honorary Contributors
  [<img style="border-radius:50%" alt="Andrei Bondarev" src="https://avatars.githubusercontent.com/u/541665?v=4" width="80" height="80" class="avatar">](https://github.com/andreibondarev)
  [<img style="border-radius:50%" alt="Rafael Figueiredo" src="https://avatars.githubusercontent.com/u/35845775?v=4" width="80" height="80" class="avatar">](https://github.com/rafaelqfigueiredo)

+ (Criteria for becoming an Honorary Contributor or Core Contributor are pending...)
+
  ## Contributing

  Bug reports and pull requests are welcome on GitHub at https://github.com/andreibondarev/langchain.
data/lib/agent/base.rb ADDED
@@ -0,0 +1,6 @@
+ # frozen_string_literal: true
+
+ module Agent
+ class Base
+ end
+ end
data/lib/agent/chain_of_thought_agent/chain_of_thought_agent.rb ADDED
@@ -0,0 +1,108 @@
+ # frozen_string_literal: true
+
+ module Agent
+ class ChainOfThoughtAgent < Base
+ attr_reader :llm, :llm_api_key, :llm_client, :tools
+
+ # Initializes the Agent
+ #
+ # @param llm [Symbol] The LLM to use
+ # @param llm_api_key [String] The API key for the LLM
+ # @param tools [Array] The tools to use
+ # @return [ChainOfThoughtAgent] The Agent::ChainOfThoughtAgent instance
+ def initialize(llm:, llm_api_key:, tools: [])
+ LLM::Base.validate_llm!(llm: llm)
+ Tool::Base.validate_tools!(tools: tools)
+
+ @llm = llm
+ @llm_api_key = llm_api_key
+ @tools = tools
+
+ @llm_client = LLM.const_get(LLM::Base::LLMS.fetch(llm)).new(api_key: llm_api_key)
+ end
+
+ # Validate tools when they're re-assigned
+ #
+ # @param value [Array] The tools to use
+ # @return [Array] The tools that will be used
+ def tools=(value)
+ Tool::Base.validate_tools!(tools: value)
+ @tools = value
+ end
+
+ # Run the Agent!
+ #
+ # @param question [String] The question to ask
+ # @param logging [Boolean] Whether or not to log the Agent's actions
+ # @return [String] The answer to the question
+ def run(question:, logging: false)
+ question = question.strip
+ prompt = create_prompt(
+ question: question,
+ tools: tools
+ )
+
+ loop do
+ puts("Agent: Passing the prompt to the #{llm} LLM") if logging
+ response = llm_client.generate_completion(
+ prompt: prompt,
+ stop_sequences: ["Observation:"],
+ max_tokens: 500
+ )
+
+ # Append the response to the prompt
+ prompt += response
+
+ # Find the requested action in the "Action: search" format
+ action = response.match(/Action: (.*)/)&.send(:[], -1)
+
+ if action
+ # Find the input to the action in the "Action Input: [action_input]" format
+ action_input = response.match(/Action Input: "?(.*)"?/)&.send(:[], -1)
+
+ puts("Agent: Using the \"#{action}\" Tool with \"#{action_input}\"") if logging
+
+ # Retrieve the Tool::[ToolName] class and call `execute` with action_input as the input
+ result = Tool
+ .const_get(Tool::Base::TOOLS[action.strip])
+ .execute(input: action_input)
+
+ # Append the Observation to the prompt
+ if prompt.end_with?("Observation:")
+ prompt += " #{result}\nThought:"
+ else
+ prompt += "\nObservation: #{result}\nThought:"
+ end
+ else
+ # Return the final answer
+ break response.match(/Final Answer: (.*)/)&.send(:[], -1)
+ end
+ end
+ end
+
+ private
+
+ # Create the initial prompt to pass to the LLM
+ # @param question [String] Question to ask
+ # @param tools [Array] Tools to use
+ # @return [String] Prompt
+ def create_prompt(question:, tools:)
+ prompt_template.format(
+ date: Date.today.strftime("%B %d, %Y"),
+ question: question,
+ tool_names: "[#{tools.join(", ")}]",
+ tools: tools.map do |tool|
+ "#{tool}: #{Tool.const_get(Tool::Base::TOOLS[tool]).const_get("DESCRIPTION")}"
+ end.join("\n")
+ )
+ end
+
+ # Load the PromptTemplate from the JSON file
+ # @return [PromptTemplate] PromptTemplate instance
+ def prompt_template
+ @template ||= Prompt.load_from_path(
+ file_path: "lib/agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json"
+ )
+ end
+ end
+ end
data/lib/agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_type": "prompt",
+ "template": "Today is {date} and you can use tools to get new information. Answer the following questions as best you can using the following tools:\n\n{tools}\n\nUse the following format:\n\nQuestion: the input question you must answer\nThought: you should always think about what to do\nAction: the action to take, should be one of {tool_names}\nAction Input: the input to the action\nObservation: the result of the action\n... (this Thought/Action/Action Input/Observation can repeat N times)\nThought: I now know the final answer\nFinal Answer: the final answer to the original input question\n\nBegin!\n\nQuestion: {question}\nThought:",
+ "input_variables": [
+ "date",
+ "question",
+ "tools",
+ "tool_names"
+ ]
+ }
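The agent loads this file through its private `prompt_template` method and fills it in `create_prompt`. A rough sketch of doing the same by hand (the question and tool descriptions are placeholder values):

```ruby
require "langchain"
require "date"

template = Prompt.load_from_path(
  file_path: "lib/agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json"
)

# Substitutes the four input_variables declared above.
puts template.format(
  date: Date.today.strftime("%B %d, %Y"),
  question: "What is the capital of France?",
  tool_names: "[search, calculator]",
  tools: "search: A wrapper around Google Search...\ncalculator: Useful for getting the result of a math expression..."
)
```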
data/lib/langchain.rb CHANGED
@@ -2,6 +2,11 @@

  require_relative "./version"

+ module Agent
+ autoload :Base, "agent/base"
+ autoload :ChainOfThoughtAgent, "agent/chain_of_thought_agent/chain_of_thought_agent.rb"
+ end
+
  module Vectorsearch
  autoload :Base, "vectorsearch/base"
  autoload :Milvus, "vectorsearch/milvus"
@@ -22,4 +27,11 @@ module Prompt
  autoload :Base, "prompt/base"
  autoload :PromptTemplate, "prompt/prompt_template"
  autoload :FewShotPromptTemplate, "prompt/few_shot_prompt_template"
- end
+ end
+
+ module Tool
+ autoload :Base, "tool/base"
+ autoload :Calculator, "tool/calculator"
+ autoload :SerpApi, "tool/serp_api"
+ autoload :Wikipedia, "tool/wikipedia"
+ end
data/lib/llm/base.rb CHANGED
@@ -14,5 +14,14 @@ module LLM
  def default_dimension
  self.class.const_get("DEFAULTS").dig(:dimension)
  end
+
+ # Ensure that the LLM value passed in is supported
+ # @param llm [Symbol] The LLM to use
+ def self.validate_llm!(llm:)
+ # TODO: Fix so this works when `llm` value is a string instead of a symbol
+ unless LLM::Base::LLMS.keys.include?(llm)
+ raise ArgumentError, "LLM must be one of #{LLM::Base::LLMS.keys}"
+ end
+ end
  end
  end
data/lib/llm/cohere.rb CHANGED
@@ -30,12 +30,20 @@ module LLM
  # Generate a completion for a given prompt
  # @param prompt [String] The prompt to generate a completion for
  # @return [Hash] The completion
- def complete(prompt:)
- response = client.generate(
+ def complete(prompt:, **params)
+ default_params = {
  prompt: prompt,
  temperature: DEFAULTS[:temperature],
- model: DEFAULTS[:completion_model_name],
- )
+ model: "xlarge" #DEFAULTS[:completion_model_name]
+ }
+
+ if params[:stop_sequences]
+ default_params[:stop_sequences] = params.delete(:stop_sequences)
+ end
+
+ default_params.merge!(params)
+
+ response = client.generate(**default_params)
  response.dig("generations").first.dig("text")
  end

data/lib/llm/openai.rb CHANGED
@@ -33,15 +33,21 @@ module LLM
  # Generate a completion for a given prompt
  # @param prompt [String] The prompt to generate a completion for
  # @return [String] The completion
- def complete(prompt:)
- response = client.completions(
- parameters: {
- model: DEFAULTS[:completion_model_name],
- temperature: DEFAULTS[:temperature],
- prompt: prompt
- }
- )
- response.dig("choices").first.dig("text")
+ def complete(prompt:, **params)
+ default_params = {
+ model: DEFAULTS[:completion_model_name],
+ temperature: DEFAULTS[:temperature],
+ prompt: prompt
+ }
+
+ if params[:stop_sequences]
+ default_params[:stop] = params.delete(:stop_sequences)
+ end
+
+ default_params.merge!(params)
+
+ response = client.completions(parameters: default_params)
+ response.dig("choices", 0, "text")
  end

  alias_method :generate_completion, :complete
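With this change both `LLM::OpenAI#complete` and `LLM::Cohere#complete` accept extra keyword parameters; the agent relies on `stop_sequences` (mapped to OpenAI's `stop` parameter above) and `max_tokens` via `generate_completion`. A minimal sketch of calling it directly, assuming `OPENAI_API_KEY` is set:

```ruby
require "langchain"

openai = LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

# Generation halts as soon as the model emits "Observation:",
# which is how the ChainOfThoughtAgent ends each loop iteration.
openai.complete(
  prompt: "Question: What is 2 + 2?\nThought:",
  stop_sequences: ["Observation:"],
  max_tokens: 500
)
```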
data/lib/tool/base.rb ADDED
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ module Tool
+ class Base
+ # How to add additional Tools?
+ # 1. Create a new file in lib/tool/your_tool_name.rb
+ # 2. Add your tool to the TOOLS hash below
+ # "your_tool_name" => "Tool::YourToolName"
+ # 3. Implement `self.execute(input:)` method in your tool class
+ # 4. Add your tool to the README.md
+
+ TOOLS = {
+ "calculator" => "Tool::Calculator",
+ "search" => "Tool::SerpApi",
+ "wikipedia" => "Tool::Wikipedia"
+ }
+
+ # Executes the tool and returns the answer
+ # @param input [String] input to the tool
+ # @return [String] answer
+ def self.execute(input:)
+ raise NotImplementedError, "Your tool must implement the `self.execute(input:)` method that returns a string"
+ end
+
+ #
+ # Validates the list of strings (tools) are all supported or raises an error
+ # @param tools [Array<String>] list of tools to be used
+ #
+ # @raise [ArgumentError] If any of the tools are not supported
+ #
+ def self.validate_tools!(tools:)
+ unrecognized_tools = tools - Tool::Base::TOOLS.keys
+
+ if unrecognized_tools.any?
+ raise ArgumentError, "Unrecognized Tools: #{unrecognized_tools}"
+ end
+ end
+ end
+ end
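Following the four steps in the comment at the top of this class, a hypothetical custom tool could look like the sketch below (the `Weather` name, its behavior, and the `TOOLS` entry are illustrative only, not part of this release):

```ruby
# lib/tool/weather.rb (hypothetical example)
# frozen_string_literal: true

module Tool
  class Weather < Base
    DESCRIPTION = "Useful for looking up the current weather for a city. " \
      "Input should be a city name."

    # @param input [String] city name
    # @return [String] a short weather summary
    def self.execute(input:)
      # Call a weather backend of your choice here; stubbed for illustration.
      "It is currently sunny in #{input}."
    end
  end
end

# Then register it in Tool::Base::TOOLS:
#   "weather" => "Tool::Weather"
```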
data/lib/tool/calculator.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ require "eqn"
+
+ module Tool
+ class Calculator < Base
+ DESCRIPTION = "Useful for getting the result of a math expression. " +
+ "The input to this tool should be a valid mathematical expression that could be executed by a simple calculator."
+
+ # Evaluates a pure math expression or if equation contains non-math characters (e.g.: "12F in Celsius") then
+ # it uses the google search calculator to evaluate the expression
+ # @param input [String] math expression
+ # @return [String] Answer
+ def self.execute(input:)
+ Eqn::Calculator.calc(input)
+ rescue Eqn::ParseError, Eqn::NoVariableValueError
+ # Sometimes the input is not a pure math expression, e.g: "12F in Celsius"
+ # We can use the google answer box to evaluate this expression
+ hash_results = Tool::SerpApi.execute_search(input: input)
+ hash_results.dig(:answer_box, :to)
+ end
+ end
+ end
data/lib/tool/serp_api.rb ADDED
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ require "google_search_results"
+
+ module Tool
+ class SerpApi < Base
+ # Wrapper around SerpAPI
+ # Set ENV["SERPAPI_API_KEY"] to use it
+
+ DESCRIPTION = "A wrapper around Google Search. " +
+ "Useful for when you need to answer questions about current events. " +
+ "Always one of the first options when you need to find information on internet. " +
+ "Input should be a search query."
+
+ # Executes Google Search and returns hash_results JSON
+ # @param input [String] search query
+ # @return [String] Answer
+ # TODO: Glance at all of the fields that langchain Python looks through: https://github.com/hwchase17/langchain/blob/v0.0.166/langchain/utilities/serpapi.py#L128-L156
+ # We may need to do the same thing here.
+ def self.execute(input:)
+ hash_results = self.execute_search(input: input)
+
+ hash_results.dig(:answer_box, :answer) ||
+ hash_results.dig(:answer_box, :snippet) ||
+ hash_results.dig(:organic_results, 0, :snippet)
+ end
+
+ # Executes Google Search and returns hash_results JSON
+ # @param input [String] search query
+ # @return [Hash] hash_results JSON
+ def self.execute_search(input:)
+ GoogleSearch.new(
+ q: input,
+ serp_api_key: ENV["SERPAPI_API_KEY"]
+ )
+ .get_hash
+ end
+ end
+ end
data/lib/tool/wikipedia.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ require 'wikipedia'
+
+ module Tool
+ class Wikipedia < Base
+ # Tool that adds the capability to search using the Wikipedia API
+
+ DESCRIPTION = "A wrapper around Wikipedia. " +
+ "Useful for when you need to answer general questions about " +
+ "people, places, companies, facts, historical events, or other subjects. " +
+ "Input should be a search query."
+
+ # Executes Wikipedia API search and returns the answer
+ # @param input [String] search query
+ # @return [String] Answer
+ def self.execute(input:)
+ page = ::Wikipedia.find(input)
+ # It would be nice to figure out a way to provide page.content but the LLM token limit is an issue
+ page.summary
+ end
+ end
+ end
data/lib/vectorsearch/base.rb CHANGED
@@ -11,7 +11,7 @@ module Vectorsearch
  # @param llm [Symbol] The LLM to use
  # @param llm_api_key [String] The API key for the LLM
  def initialize(llm:, llm_api_key:)
- validate_llm!(llm: llm)
+ LLM::Base.validate_llm!(llm: llm)

  @llm = llm
  @llm_api_key = llm_api_key
@@ -54,14 +54,5 @@ module Vectorsearch

  prompt_template.format(question: question)
  end
-
- private
-
- def validate_llm!(llm:)
- # TODO: Fix so this works when `llm` value is a string instead of a symbol
- unless LLM::Base::LLMS.keys.include?(llm)
- raise ArgumentError, "LLM must be one of #{LLM::Base::LLMS.keys}"
- end
- end
  end
  end
data/lib/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Langchain
- VERSION = "0.2.0"
+ VERSION = "0.3.1"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: langchainrb
  version: !ruby/object:Gem::Version
- version: 0.2.0
+ version: 0.3.1
  platform: ruby
  authors:
  - Andrei Bondarev
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-05-09 00:00:00.000000000 Z
+ date: 2023-05-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: pry-byebug
@@ -44,14 +44,28 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.9.1
+ version: 0.9.3
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.9.1
+ version: 0.9.3
+ - !ruby/object:Gem::Dependency
+ name: eqn
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 1.6.5
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 1.6.5
  - !ruby/object:Gem::Dependency
  name: milvus
  requirement: !ruby/object:Gem::Requirement
@@ -108,6 +122,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: 0.9.0
+ - !ruby/object:Gem::Dependency
+ name: google_search_results
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 2.0.0
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 2.0.0
  - !ruby/object:Gem::Dependency
  name: weaviate-ruby
  requirement: !ruby/object:Gem::Requirement
@@ -122,6 +150,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: 0.8.0
+ - !ruby/object:Gem::Dependency
+ name: wikipedia-client
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 1.17.0
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: 1.17.0
  description: Build ML/AI-powered applications with Ruby's LangChain
  email:
  - andrei.bondarev13@gmail.com
@@ -142,6 +184,9 @@ files:
  - examples/store_and_query_with_pinecone.rb
  - examples/store_and_query_with_qdrant.rb
  - examples/store_and_query_with_weaviate.rb
+ - lib/agent/base.rb
+ - lib/agent/chain_of_thought_agent/chain_of_thought_agent.rb
+ - lib/agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json
  - lib/langchain.rb
  - lib/llm/base.rb
  - lib/llm/cohere.rb
@@ -150,6 +195,10 @@ files:
  - lib/prompt/few_shot_prompt_template.rb
  - lib/prompt/loading.rb
  - lib/prompt/prompt_template.rb
+ - lib/tool/base.rb
+ - lib/tool/calculator.rb
+ - lib/tool/serp_api.rb
+ - lib/tool/wikipedia.rb
  - lib/vectorsearch/base.rb
  - lib/vectorsearch/milvus.rb
  - lib/vectorsearch/pinecone.rb