langchainrb 0.3.6 → 0.3.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 576c331bc4372bc1934a6a75fc496469242d0b645b56ffb9163cd9f812007414
- data.tar.gz: 82a9ea6c734cab1490fbe8060732c41c4aba6e3d06d6895ea40e9f2f5db2f6ac
+ metadata.gz: 93a3fcc195fbdf55ec52402c1db2f11c929069c03afa90477259e6bf2f542957
+ data.tar.gz: 737e456d831e40e8c388a1986f2483f9dff3934c8b4e05a9456529e017075637
  SHA512:
- metadata.gz: ce0325a59c2257e35c0be5e3e78ad44d046a058070585ba8127b97338c4280d4897f0dd070b16b9bea9cc0f5f7cd7f6b330611bb49f6180ecb0dafc55bb77d16
- data.tar.gz: f38012ae7d0da8c70d76f37f2a24431e709fe0841e62812f1471a1b9a2b33763235c4eb246bb6f20d61e50e903c137156abb851a25b4d46e4b7fd85333f2dfe2
+ metadata.gz: 23619f8e256a9856eb113afce8eef94f759beb84644a87d9b67cac4fac9d5aedfc06978b3baedad1d988308b3cdb72d32e19a0f5e21d8b8431e8c9ff04eda548
+ data.tar.gz: 6cd0fcc55553a5472e2ac6c69a6e49dbd4b52fe8bcc899c04710ae40397c39947a3418649737c0d21a99086f85736d52f5968b0cc8af761f186eedf790ba85db
data/.env.example ADDED
@@ -0,0 +1,11 @@
+ COHERE_API_KEY=
+ HUGGING_FACE_API_KEY=
+ MILVUS_URL=
+ OPENAI_API_KEY=
+ PINECONE_API_KEY=
+ PINECONE_ENVIRONMENT=
+ QDRANT_API_KEY=
+ QDRANT_URL=
+ SERPAPI_API_KEY=
+ WEAVIATE_API_KEY=
+ WEAVIATE_URL=
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
  ## [Unreleased]

+ ## [0.3.7] - 2023-05-19
+ - Loaders
+ - Introduce `Loaders::Text` to parse .txt files
+ - Introduce `Loaders::PDF` to parse .pdf files
+
  ## [0.3.6] - 2023-05-17
  - LLMs
  - Bump `hugging-face` gem version
data/Gemfile.lock CHANGED
@@ -1,11 +1,12 @@
  PATH
  remote: .
  specs:
- langchainrb (0.3.6)
+ langchainrb (0.3.7)

  GEM
  remote: https://rubygems.org/
  specs:
+ Ascii85 (1.0.3)
  actionpack (7.0.4.3)
  actionview (= 7.0.4.3)
  activesupport (= 7.0.4.3)
@@ -26,6 +27,7 @@ GEM
  tzinfo (~> 2.0)
  addressable (2.8.4)
  public_suffix (>= 2.0.2, < 6.0)
+ afm (0.2.2)
  ast (2.4.2)
  builder (3.2.4)
  byebug (11.1.3)
@@ -114,6 +116,7 @@ GEM
  graphql-client (0.18.0)
  activesupport (>= 3.0)
  graphql
+ hashery (2.1.2)
  httparty (0.21.0)
  mini_mime (>= 1.0.0)
  multi_xml (>= 0.5.2)
@@ -144,6 +147,12 @@ GEM
  parallel (1.23.0)
  parser (3.2.2.1)
  ast (~> 2.4.1)
+ pdf-reader (1.4.1)
+ Ascii85 (~> 1.0.0)
+ afm (~> 0.2.1)
+ hashery (~> 2.0)
+ ruby-rc4
+ ttfunk
  pinecone (0.1.71)
  dry-struct (~> 1.6.0)
  dry-validation (~> 1.10.0)
@@ -211,6 +220,7 @@ GEM
  faraday (>= 1)
  faraday-multipart (>= 1)
  ruby-progressbar (1.13.0)
+ ruby-rc4 (0.1.5)
  ruby2_keywords (0.0.5)
  standard (1.28.2)
  language_server-protocol (~> 3.17.0.2)
@@ -228,6 +238,7 @@ GEM
  thor (1.2.1)
  treetop (1.6.12)
  polyglot (~> 0.3)
+ ttfunk (1.7.0)
  tzinfo (2.0.6)
  concurrent-ruby (~> 1.0)
  unicode-display_width (2.4.2)
@@ -252,6 +263,7 @@ DEPENDENCIES
  hugging-face (~> 0.3.3)
  langchainrb!
  milvus (~> 0.9.0)
+ pdf-reader (~> 1.4)
  pinecone (~> 0.1.6)
  pry-byebug (~> 3.10.0)
  qdrant-ruby (~> 0.9.0)
data/README.md CHANGED
@@ -62,7 +62,7 @@ client.create_default_schema
  ```

  ```ruby
- # Store your documents in your vector search database
+ # Store plain texts in your vector search database
  client.add_texts(
  texts: [
  "Begin by preheating your oven to 375°F (190°C). Prepare four boneless, skinless chicken breasts by cutting a pocket into the side of each breast, being careful not to cut all the way through. Season the chicken with salt and pepper to taste. In a large skillet, melt 2 tablespoons of unsalted butter over medium heat. Add 1 small diced onion and 2 minced garlic cloves, and cook until softened, about 3-4 minutes. Add 8 ounces of fresh spinach and cook until wilted, about 3 minutes. Remove the skillet from heat and let the mixture cool slightly.",
@@ -70,7 +70,13 @@ client.add_texts(
  ]
  )
  ```
+ ```ruby
+ # Store the contents of your files in your vector search database
+ my_pdf = Langchain.root.join("path/to/my.pdf")
+ my_text = Langchain.root.join("path/to/my.txt")

+ client.add_data(paths: [my_pdf, my_text])
+ ```
  ```ruby
  # Retrieve similar documents based on the query string passed in
  client.similarity_search(
@@ -233,11 +239,21 @@ agent.run(question: "How many full soccer fields would be needed to cover the di

  #### Available Tools 🛠️

- | Name | Description | Requirements |
- | -------- | :------------------: | :------------------: |
- | "calculator" | Useful for getting the result of a math expression | |
- | "search" | A wrapper around Google Search | `ENV["SERPAPI_API_KEY"]` (https://serpapi.com/manage-api-key)
- | "wikipedia" | Calls Wikipedia API to retrieve the summary | |
+ | Name | Description | ENV Requirements | Gem Requirements |
+ | ------------ | :------------------------------------------------: | :-----------------------------------------------------------: | :---------------------------------------: |
+ | "calculator" | Useful for getting the result of a math expression | | `gem "eqn", "~> 1.6.5"` |
+ | "search" | A wrapper around Google Search | `ENV["SERPAPI_API_KEY"]` (https://serpapi.com/manage-api-key) | `gem "google_search_results", "~> 2.0.0"` |
+ | "wikipedia" | Calls Wikipedia API to retrieve the summary | | `gem "wikipedia-client", "~> 1.17.0"` |
+
+
+ #### Loaders 🚚
+
+ Need to read data from various sources? Load it up.
+
+ | Name | Class | Gem Requirements |
+ | ---- | ------------- | :--------------------------: |
+ | pdf | Loaders::PDF | `gem "pdf-reader", "~> 1.4"` |
+ | text | Loaders::Text | |


  ## Logging
@@ -251,9 +267,10 @@ Langchain.logger.level = :info

  ## Development

- After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
-
- To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+ 1. `git clone https://github.com/andreibondarev/langchainrb.git`
+ 2. `cp .env.example .env`, then fill out the environment variables in `.env`
+ 3. `rspec spec/` to ensure that the tests pass
+ 4. `bin/console` to load the gem in a REPL session. Feel free to add your own instances of LLMs, Tools, Agents, etc. and experiment with them.

  ## Core Contributors
  [<img style="border-radius:50%" alt="Andrei Bondarev" src="https://avatars.githubusercontent.com/u/541665?v=4" width="80" height="80" class="avatar">](https://github.com/andreibondarev)
@@ -261,8 +278,9 @@ To install this gem onto your local machine, run `bundle exec rake install`. To
  ## Honorary Contributors
  [<img style="border-radius:50%" alt="Andrei Bondarev" src="https://avatars.githubusercontent.com/u/541665?v=4" width="80" height="80" class="avatar">](https://github.com/andreibondarev)
  [<img style="border-radius:50%" alt="Rafael Figueiredo" src="https://avatars.githubusercontent.com/u/35845775?v=4" width="80" height="80" class="avatar">](https://github.com/rafaelqfigueiredo)
+ [<img style="border-radius:50%" alt="Ricky Chilcott" src="https://avatars.githubusercontent.com/u/445759?v=4" width="80" height="80" class="avatar">](https://github.com/rickychilcott)

- (Criteria of becoming an Honorary Contributor or Core Contributor is pending...)
+ (Criteria for becoming an Honorary Contributor or Core Contributor is pending...)

  ## Contributing

data/lib/agent/chain_of_thought_agent/chain_of_thought_agent.rb CHANGED
@@ -43,7 +43,7 @@ module Agent

  loop do
  Langchain.logger.info("Agent: Passing the prompt to the #{llm} LLM")
- response = llm_client.generate_completion(
+ response = llm_client.complete(
  prompt: prompt,
  stop_sequences: ["Observation:"],
  max_tokens: 500
@@ -100,7 +100,7 @@ module Agent
  # @return [PromptTemplate] PromptTemplate instance
  def prompt_template
  @template ||= Prompt.load_from_path(
- file_path: Pathname.new(__dir__).join("chain_of_thought_agent_prompt.json")
+ file_path: Langchain.root.join("agent/chain_of_thought_agent/chain_of_thought_agent_prompt.json")
  )
  end
  end
data/lib/dependency_helper.rb CHANGED
@@ -1,5 +1,13 @@
  # frozen_string_literal: true

+ # This method requires and loads the given gem, and then checks to see if the version of the gem meets the requirements listed in `langchain.gemspec`.
+ # This solution was built to avoid auto-loading every single gem in the Gemfile when the developer will most likely only be using a few of them.
+ #
+ # @param gem_name [String] The name of the gem to load
+ # @return [Boolean] Whether or not the gem was loaded successfully
+ # @raise [LoadError] If the gem is not installed
+ # @raise [LoadError] If the gem is installed, but the version does not meet the requirements
+ #
  def depends_on(gem_name)
  gem(gem_name) # require the gem

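Only the first lines of `depends_on` appear in this hunk. As a minimal sketch of the pattern it documents (not the gem's exact implementation, which also checks the gemspec's version requirements), such a helper activates an optional gem lazily and fails with a clear error when it is missing:

```ruby
# Hypothetical sketch of the depends_on pattern, not the gem's exact code:
# activate an optional dependency only when the feature that needs it is used.
def depends_on(gem_name)
  gem(gem_name) # raises Gem::LoadError if the gem is not installed
  true
rescue ::Gem::LoadError
  raise LoadError, "Could not load #{gem_name}. Please add it to your Gemfile."
end

# A loader can then opt into its dependency at instantiation time:
depends_on "pdf-reader"
require "pdf-reader"
```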
data/lib/langchain.rb CHANGED
@@ -1,8 +1,21 @@
  # frozen_string_literal: true

+ require "logger"
+
  require_relative "./version"
  require_relative "./dependency_helper"
- require_relative "./logging"
+ module Langchain
+ class << self
+ attr_accessor :default_loaders
+ attr_accessor :logger
+
+ attr_reader :root
+ end
+
+ @logger ||= ::Logger.new($stdout, level: :warn, formatter: ->(severity, datetime, progname, msg) { "[LangChain.rb] #{msg}\n" })
+
+ @root = Pathname.new(__dir__)
+ end

  module Agent
  autoload :Base, "agent/base"
@@ -38,3 +51,14 @@ module Tool
  autoload :SerpApi, "tool/serp_api"
  autoload :Wikipedia, "tool/wikipedia"
  end
+
+ module Loaders
+ autoload :Base, "loaders/base"
+ autoload :PDF, "loaders/pdf"
+ autoload :Text, "loaders/text"
+ end
+
+ autoload :Loader, "loader"
+
+ # Load the default Loaders
+ Langchain.default_loaders ||= [::Loaders::Text, ::Loaders::PDF]
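With `logger`, `default_loaders`, and `root` now exposed as class-level attributes on `Langchain`, an application can adjust them at boot. A minimal sketch, assuming the gem is already in the bundle:

```ruby
require "langchain"
require "logger"

# Send the gem's log output to stdout at a chattier level than the default :warn.
Langchain.logger = Logger.new($stdout, level: :info)

# Reorder or restrict the loaders that Vectorsearch#add_data will try.
Langchain.default_loaders = [Loaders::PDF, Loaders::Text]

# Langchain.root points at the gem's lib/ directory and can be used to build paths.
puts Langchain.root.join("loaders")
```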
data/lib/llm/base.rb CHANGED
@@ -16,6 +16,21 @@ module LLM
  self.class.const_get(:DEFAULTS).dig(:dimension)
  end

+ # Method supported by an LLM that generates a response for a given chat-style prompt
+ def chat(...)
+ raise NotImplementedError, "#{self.class.name} does not support chat"
+ end
+
+ # Method supported by an LLM that completes a given prompt
+ def complete(...)
+ raise NotImplementedError, "#{self.class.name} does not support completion"
+ end
+
+ # Method supported by an LLM that generates an embedding for a given text or array of texts
+ def embed(...)
+ raise NotImplementedError, "#{self.class.name} does not support generating embeddings"
+ end
+
  # Ensure that the LLM value passed in is supported
  # @param llm [Symbol] The LLM to use
  def self.validate_llm!(llm:)
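The new `chat`, `complete`, and `embed` stubs on `LLM::Base` spell out the interface each adapter is expected to override, raising `NotImplementedError` otherwise. A hypothetical adapter sketch (the `LLM::Echo` class below is illustrative only and not part of the gem):

```ruby
# Hypothetical adapter, for illustration only: satisfies the LLM::Base
# contract with canned behaviour instead of calling a real API.
module LLM
  class Echo < Base
    DEFAULTS = {dimension: 4}.freeze

    # Complete a prompt by echoing it back.
    def complete(prompt:, **_options)
      "ECHO: #{prompt}"
    end

    # No dedicated chat endpoint, so chat delegates to complete.
    def chat(prompt:, **_options)
      complete(prompt: prompt)
    end

    # Return a fixed-size dummy embedding.
    def embed(text:)
      Array.new(DEFAULTS[:dimension]) { text.length.to_f }
    end
  end
end
```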
data/lib/llm/cohere.rb CHANGED
@@ -47,7 +47,11 @@ module LLM
  response.dig("generations").first.dig("text")
  end

- alias_method :generate_completion, :complete
+ # Cohere does not have a dedicated chat endpoint, so instead we call `complete()`
+ def chat(...)
+ complete(...)
+ end
+
  alias_method :generate_embedding, :embed
  end
  end
data/lib/llm/hugging_face.rb CHANGED
@@ -25,7 +25,7 @@ module LLM
  # @param text [String] The text to embed
  # @return [Array] The embedding
  def embed(text:)
- response = client.embedding(
+ client.embedding(
  input: text,
  model: DEFAULTS[:embeddings_model_name]
  )
data/lib/llm/openai.rb CHANGED
@@ -5,6 +5,7 @@ module LLM
  DEFAULTS = {
  temperature: 0.0,
  completion_model_name: "text-davinci-003",
+ chat_completion_model_name: "gpt-3.5-turbo",
  embeddings_model_name: "text-embedding-ada-002",
  dimension: 1536
  }.freeze
@@ -50,7 +51,27 @@ module LLM
  response.dig("choices", 0, "text")
  end

- alias_method :generate_completion, :complete
+ # Generate a chat completion for a given prompt
+ # @param prompt [String] The prompt to generate a chat completion for
+ # @return [String] The chat completion
+ def chat(prompt:, **params)
+ default_params = {
+ model: DEFAULTS[:chat_completion_model_name],
+ temperature: DEFAULTS[:temperature],
+ # TODO: Figure out how to introduce persisted conversations
+ messages: [{role: "user", content: prompt}]
+ }
+
+ if params[:stop_sequences]
+ default_params[:stop] = params.delete(:stop_sequences)
+ end
+
+ default_params.merge!(params)
+
+ response = client.chat(parameters: default_params)
+ response.dig("choices", 0, "message", "content")
+ end
+
  alias_method :generate_embedding, :embed
  end
  end
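The new `LLM::OpenAI#chat` sends a single-turn request to `gpt-3.5-turbo` and returns the message content; `stop_sequences:` is translated into the API's `stop` parameter and any other keyword arguments are merged into the request. A minimal usage sketch, assuming the client is constructed with an `api_key:` keyword as in the gem's README:

```ruby
require "langchain"

openai = LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

answer = openai.chat(
  prompt: "In one sentence, what is a vector database?",
  max_tokens: 256 # merged into the request params alongside the defaults
)
puts answer
```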
data/lib/loader.rb ADDED
@@ -0,0 +1,26 @@
+ module Loader
+ def self.with(*loaders)
+ LoaderSet.new(loaders)
+ end
+
+ class LoaderSet
+ def initialize(loaders)
+ @loaders = Array(loaders)
+ end
+
+ def load(*paths)
+ Array(paths)
+ .flatten
+ .map { |path| first_loadable_loader(path)&.load }
+ .compact
+ end
+
+ def first_loadable_loader(path)
+ @loaders
+ .each do |loader_klass|
+ loader_instance = loader_klass.new(path)
+ return(loader_instance) if loader_instance.loadable?
+ end
+ end
+ end
+ end
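`Loader.with` builds a small chain of responsibility: for each path, `LoaderSet#load` picks the first loader whose `loadable?` accepts it and collects the non-nil results. A minimal sketch of calling it directly, outside of a Vectorsearch client (the file paths are placeholders):

```ruby
require "langchain"

# Placeholder paths; Pathname objects returned by Langchain.root.join work here.
pdf_path  = Langchain.root.join("path/to/my.pdf")
text_path = Langchain.root.join("path/to/my.txt")

# Try the PDF loader first, then fall back to the plain-text loader.
texts = Loader
  .with(Loaders::PDF, Loaders::Text)
  .load(pdf_path, text_path)

texts.each { |text| puts text[0, 80] }
```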
data/lib/loaders/base.rb ADDED
@@ -0,0 +1,17 @@
+ # frozen_string_literal: true
+
+ module Loaders
+ class Base
+ def self.load(path)
+ new.load(path)
+ end
+
+ def initialize(path)
+ @path = path
+ end
+
+ def loadable?
+ raise NotImplementedError
+ end
+ end
+ end
data/lib/loaders/pdf.rb ADDED
@@ -0,0 +1,34 @@
+ module Loaders
+ class PDF < Base
+ #
+ # This Loader parses PDF files into text.
+ # If you'd like to use it directly you can do so like this:
+ # Loaders::PDF.new("path/to/my.pdf").load
+ #
+ # This parser is also invoked when you're adding data to a Vectorsearch DB:
+ # qdrant = Vectorsearch::Qdrant.new(...)
+ # path = Langchain.root.join("path/to/my.pdf")
+ # qdrant.add_data(path: path)
+ #
+
+ def initialize(path)
+ depends_on "pdf-reader"
+ require "pdf-reader"
+
+ @path = path
+ end
+
+ # Check that the file is a PDF file
+ def loadable?
+ @path.to_s.end_with?(".pdf")
+ end
+
+ def load
+ ::PDF::Reader
+ .new(@path)
+ .pages
+ .map(&:text)
+ .join("\n\n")
+ end
+ end
+ end
data/lib/loaders/text.rb ADDED
@@ -0,0 +1,22 @@
+ module Loaders
+ class Text < Base
+ #
+ # This Loader parses .txt files.
+ # If you'd like to use it directly you can do so like this:
+ # Loaders::Text.new("path/to/my.txt").load
+ #
+ # This parser is also invoked when you're adding data to a Vectorsearch DB:
+ # qdrant = Vectorsearch::Qdrant.new(...)
+ # path = Langchain.root.join("path/to/my.txt")
+ # qdrant.add_data(path: path)
+ #
+
+ def loadable?
+ true
+ end
+
+ def load
+ @path.read
+ end
+ end
+ end
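Both built-in loaders share the same small contract from `Loaders::Base`: `initialize(path)`, `loadable?`, and `load`. A hypothetical Markdown loader, shown only as a sketch of how another format could be plugged in (it is not part of this release):

```ruby
# Hypothetical example, not shipped in 0.3.7: a loader for Markdown files
# that follows the same contract as Loaders::Text and Loaders::PDF.
module Loaders
  class Markdown < Base
    # Only claim paths that look like Markdown files.
    def loadable?
      @path.to_s.end_with?(".md", ".markdown")
    end

    # Return the raw text; a fancier version could strip Markdown syntax first.
    def load
      @path.read
    end
  end
end

# Register it so that Vectorsearch#add_data will consider it as well.
Langchain.default_loaders << Loaders::Markdown
```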
data/lib/tool/base.rb CHANGED
@@ -12,8 +12,7 @@ module Tool
  TOOLS = {
  "calculator" => "Tool::Calculator",
  "search" => "Tool::SerpApi",
- "wikipedia" => "Tool::Wikipedia",
- "news" => "Tool::News"
+ "wikipedia" => "Tool::Wikipedia"
  }

  def self.description(value)
data/lib/vectorsearch/base.rb CHANGED
@@ -19,24 +19,38 @@ module Vectorsearch
  @llm_api_key = llm_api_key

  @llm_client = LLM.const_get(LLM::Base::LLMS.fetch(llm)).new(api_key: llm_api_key)
+
+ @loaders = Langchain.default_loaders
  end

+ # Method supported by Vectorsearch DB to create a default schema
  def create_default_schema
- raise NotImplementedError
+ raise NotImplementedError, "#{self.class.name} does not support creating a default schema"
+ end
+
+ # Method supported by Vectorsearch DB to add a list of texts to the index
+ def add_texts(...)
+ raise NotImplementedError, "#{self.class.name} does not support adding texts"
  end

- def add_texts(texts:)
- raise NotImplementedError
+ # Method supported by Vectorsearch DB to search for similar texts in the index
+ def similarity_search(...)
+ raise NotImplementedError, "#{self.class.name} does not support similarity search"
  end

- # NotImplementedError will be raised if the subclass does not implement this method
- def ask(question:)
- raise NotImplementedError
+ # Method supported by Vectorsearch DB to search for similar texts in the index by the passed in vector.
+ # You must generate your own vector using the same LLM that generated the embeddings stored in the Vectorsearch DB.
+ def similarity_search_by_vector(...)
+ raise NotImplementedError, "#{self.class.name} does not support similarity search by vector"
+ end
+
+ # Method supported by Vectorsearch DB to answer a question given a context (data) pulled from your Vectorsearch DB.
+ def ask(...)
+ raise NotImplementedError, "#{self.class.name} does not support asking questions"
  end

  def_delegators :llm_client,
  :generate_embedding,
- :generate_completion,
  :default_dimension

  def generate_prompt(question:, context:)
@@ -56,5 +70,23 @@ module Vectorsearch

  prompt_template.format(question: question)
  end
+
+ def add_data(path: nil, paths: nil)
+ raise ArgumentError, "Either path or paths must be provided" if path.nil? && paths.nil?
+ raise ArgumentError, "Either path or paths must be provided, not both" if !path.nil? && !paths.nil?
+
+ texts =
+ Loader
+ .with(*loaders)
+ .load(path || paths)
+
+ add_texts(texts: texts)
+ end
+
+ attr_reader :loaders
+
+ def add_loader(*loaders)
+ loaders.each { |loader| @loaders << loader }
+ end
  end
  end
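`add_data` accepts either a single `path:` or an array of `paths:`, feeds them through the configured loaders, and passes the extracted texts to the subclass's `add_texts`. A minimal sketch, assuming a Qdrant client constructed as in the gem's README (URL, API key, and index name are placeholders):

```ruby
require "langchain"

qdrant = Vectorsearch::Qdrant.new(
  url: ENV["QDRANT_URL"],
  api_key: ENV["QDRANT_API_KEY"],
  index_name: "recipes",
  llm: :openai,
  llm_api_key: ENV["OPENAI_API_KEY"]
)

# Index local files; the loader for each file is chosen by its extension.
qdrant.add_data(paths: [
  Langchain.root.join("path/to/my.pdf"),
  Langchain.root.join("path/to/my.txt")
])
```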
@@ -88,9 +88,5 @@ module Vectorsearch
  metric_type: "L2"
  )
  end
-
- def ask(question:)
- raise NotImplementedError
- end
  end
  end
@@ -96,7 +96,7 @@ module Vectorsearch

  prompt = generate_prompt(question: question, context: context)

- generate_completion(prompt: prompt)
+ llm_client.chat(prompt: prompt)
  end
  end
  end
@@ -96,7 +96,7 @@ module Vectorsearch

  prompt = generate_prompt(question: question, context: context)

- generate_completion(prompt: prompt)
+ llm_client.chat(prompt: prompt)
  end
  end
  end
@@ -113,7 +113,7 @@ module Vectorsearch

  prompt = generate_prompt(question: question, context: context)

- generate_completion(prompt: prompt)
+ llm_client.chat(prompt: prompt)
  end
  end
  end
data/lib/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Langchain
- VERSION = "0.3.6"
+ VERSION = "0.3.7"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: langchainrb
  version: !ruby/object:Gem::Version
- version: 0.3.6
+ version: 0.3.7
  platform: ruby
  authors:
  - Andrei Bondarev
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-05-17 00:00:00.000000000 Z
+ date: 2023-05-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: dotenv-rails
@@ -108,6 +108,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: 0.9.0
+ - !ruby/object:Gem::Dependency
+ name: pdf-reader
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.4'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.4'
  - !ruby/object:Gem::Dependency
  name: pinecone
  requirement: !ruby/object:Gem::Requirement
@@ -185,6 +199,7 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - ".env.example"
  - ".rspec"
  - CHANGELOG.md
  - Gemfile
@@ -207,7 +222,10 @@ files:
  - lib/llm/cohere.rb
  - lib/llm/hugging_face.rb
  - lib/llm/openai.rb
- - lib/logging.rb
+ - lib/loader.rb
+ - lib/loaders/base.rb
+ - lib/loaders/pdf.rb
+ - lib/loaders/text.rb
  - lib/prompt/base.rb
  - lib/prompt/few_shot_prompt_template.rb
  - lib/prompt/loading.rb
data/lib/logging.rb DELETED
@@ -1,13 +0,0 @@
- # frozen_string_literal: true
-
- require "logger"
-
- module Langchain
- def self.logger
- @@logger ||= Logger.new($stdout, level: :warn, formatter: ->(severity, datetime, progname, msg) { "[LangChain.rb] #{msg}\n" })
- end
-
- def self.logger=(instance)
- @@logger = instance
- end
- end