langchainrb 0.6.10 → 0.6.12

Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +4 -0
  3. data/README.md +5 -7
  4. data/lib/langchain/agent/base.rb +1 -0
  5. data/lib/langchain/agent/{react_agent/react_agent.rb → react_agent.rb} +12 -11
  6. data/lib/langchain/ai_message.rb +9 -0
  7. data/lib/langchain/conversation.rb +11 -11
  8. data/lib/langchain/conversation_memory.rb +3 -7
  9. data/lib/langchain/human_message.rb +9 -0
  10. data/lib/langchain/llm/anthropic.rb +3 -2
  11. data/lib/langchain/llm/cohere.rb +2 -1
  12. data/lib/langchain/llm/google_palm.rb +15 -10
  13. data/lib/langchain/llm/llama_cpp.rb +5 -5
  14. data/lib/langchain/llm/openai.rb +24 -25
  15. data/lib/langchain/llm/replicate.rb +2 -1
  16. data/lib/langchain/loader.rb +2 -2
  17. data/lib/langchain/message.rb +35 -0
  18. data/lib/langchain/output_parsers/base.rb +5 -4
  19. data/lib/langchain/output_parsers/{fix.rb → output_fixing_parser.rb} +3 -1
  20. data/lib/langchain/prompt/loading.rb +73 -67
  21. data/lib/langchain/prompt.rb +5 -0
  22. data/lib/langchain/system_message.rb +9 -0
  23. data/lib/langchain/tool/base.rb +14 -14
  24. data/lib/langchain/vectorsearch/milvus.rb +46 -5
  25. data/lib/langchain/vectorsearch/pgvector.rb +7 -5
  26. data/lib/langchain/version.rb +1 -1
  27. data/lib/langchain.rb +19 -97
  28. metadata +37 -38
  29. data/.env.example +0 -21
  30. data/.rspec +0 -3
  31. data/.rubocop.yml +0 -11
  32. data/.tool-versions +0 -1
  33. data/Gemfile +0 -14
  34. data/Gemfile.lock +0 -360
  35. data/Rakefile +0 -17
  36. data/examples/conversation_with_openai.rb +0 -52
  37. data/examples/create_and_manage_few_shot_prompt_templates.rb +0 -36
  38. data/examples/create_and_manage_prompt_templates.rb +0 -25
  39. data/examples/create_and_manage_prompt_templates_using_structured_output_parser.rb +0 -116
  40. data/examples/llama_cpp.rb +0 -24
  41. data/examples/open_ai_function_calls.rb +0 -41
  42. data/examples/open_ai_qdrant_function_calls.rb +0 -43
  43. data/examples/pdf_store_and_query_with_chroma.rb +0 -40
  44. data/examples/store_and_query_with_pinecone.rb +0 -46
  45. data/examples/store_and_query_with_qdrant.rb +0 -37
  46. data/examples/store_and_query_with_weaviate.rb +0 -32
  47. data/lefthook.yml +0 -5
  48. data/sig/langchain.rbs +0 -4
  49. /data/lib/langchain/agent/{sql_query_agent/sql_query_agent.rb → sql_query_agent.rb} +0 -0
  50. /data/lib/langchain/output_parsers/{structured.rb → structured_output_parser.rb} +0 -0
data/lib/langchain/prompt/loading.rb CHANGED
@@ -11,82 +11,88 @@ module Langchain::Prompt
     "few_shot" => ->(config) { load_few_shot_prompt(config) }
   }
 
-  class << self
-    #
-    # Load prompt from file.
-    #
-    # @param file_path [String, Pathname] The path of the file to read the configuration data from.
-    #
-    # @return [Object] The loaded prompt loaded.
-    #
-    # @raise [ArgumentError] If the file type of the specified file path is not supported.
-    #
-    def load_from_path(file_path:)
-      file_path = file_path.is_a?(String) ? Pathname.new(file_path) : file_path
-
-      case file_path.extname
-      when ".json"
-        config = JSON.parse(File.read(file_path))
-      when ".yaml", ".yml"
-        config = YAML.safe_load(File.read(file_path))
-      else
-        raise ArgumentError, "Got unsupported file type #{file_path.extname}"
-      end
-
-      load_from_config(config)
+  module Loading
+    def self.included(base)
+      base.extend ClassMethods
     end
 
-    #
-    # Loads a prompt template with the given configuration.
-    #
-    # @param config [Hash] A hash containing the configuration for the prompt.
-    #
-    # @return [PromptTemplate] The loaded prompt loaded.
-    #
-    def load_prompt(config)
-      template, input_variables = config.values_at("template", "input_variables")
-      PromptTemplate.new(template: template, input_variables: input_variables)
-    end
+    module ClassMethods
+      #
+      # Load prompt from file.
+      #
+      # @param file_path [String, Pathname] The path of the file to read the configuration data from.
+      #
+      # @return [Object] The loaded prompt loaded.
+      #
+      # @raise [ArgumentError] If the file type of the specified file path is not supported.
+      #
+      def load_from_path(file_path:)
+        file_path = file_path.is_a?(String) ? Pathname.new(file_path) : file_path
 
-    #
-    # Loads a prompt template with the given configuration.
-    #
-    # @param config [Hash] A hash containing the configuration for the prompt.
-    #
-    # @return [FewShotPromptTemplate] The loaded prompt loaded.
-    #
-    def load_few_shot_prompt(config)
-      prefix, suffix, example_prompt, examples, input_variables = config.values_at("prefix", "suffix", "example_prompt", "examples", "input_variables")
-      example_prompt = load_prompt(example_prompt)
-      FewShotPromptTemplate.new(prefix: prefix, suffix: suffix, example_prompt: example_prompt, examples: examples, input_variables: input_variables)
-    end
+        case file_path.extname
+        when ".json"
+          config = JSON.parse(File.read(file_path))
+        when ".yaml", ".yml"
+          config = YAML.safe_load(File.read(file_path))
+        else
+          raise ArgumentError, "Got unsupported file type #{file_path.extname}"
+        end
 
-    private
+        load_from_config(config)
+      end
 
-    #
-    # Loads the prompt from the given configuration hash
-    #
-    # @param config [Hash] the configuration hash to load from
-    #
-    # @return [Object] the loaded prompt
-    #
-    # @raise [ArgumentError] if the prompt type specified in the config is not supported
-    #
-    def load_from_config(config)
-      # If `_type` key is not present in the configuration hash, add it with a default value of `prompt`
-      unless config.key?("_type")
-        Langchain.logger.warn "No `_type` key found, defaulting to `prompt`"
-        config["_type"] = "prompt"
+      #
+      # Loads a prompt template with the given configuration.
+      #
+      # @param config [Hash] A hash containing the configuration for the prompt.
+      #
+      # @return [PromptTemplate] The loaded prompt loaded.
+      #
+      def load_prompt(config)
+        template, input_variables = config.values_at("template", "input_variables")
+        PromptTemplate.new(template: template, input_variables: input_variables)
       end
 
-      # If the prompt type specified in the configuration hash is not supported, raise an exception
-      unless TYPE_TO_LOADER.key?(config["_type"])
-        raise ArgumentError, "Loading #{config["_type"]} prompt not supported"
+      #
+      # Loads a prompt template with the given configuration.
+      #
+      # @param config [Hash] A hash containing the configuration for the prompt.
+      #
+      # @return [FewShotPromptTemplate] The loaded prompt loaded.
+      #
+      def load_few_shot_prompt(config)
+        prefix, suffix, example_prompt, examples, input_variables = config.values_at("prefix", "suffix", "example_prompt", "examples", "input_variables")
+        example_prompt = load_prompt(example_prompt)
+        FewShotPromptTemplate.new(prefix: prefix, suffix: suffix, example_prompt: example_prompt, examples: examples, input_variables: input_variables)
       end
 
-      # Load the prompt using the corresponding loader function from the `TYPE_TO_LOADER` hash
-      prompt_loader = TYPE_TO_LOADER[config["_type"]]
-      prompt_loader.call(config)
+      private
+
+      #
+      # Loads the prompt from the given configuration hash
+      #
+      # @param config [Hash] the configuration hash to load from
+      #
+      # @return [Object] the loaded prompt
+      #
+      # @raise [ArgumentError] if the prompt type specified in the config is not supported
+      #
+      def load_from_config(config)
+        # If `_type` key is not present in the configuration hash, add it with a default value of `prompt`
+        unless config.key?("_type")
+          Langchain.logger.warn "No `_type` key found, defaulting to `prompt`"
+          config["_type"] = "prompt"
+        end
+
+        # If the prompt type specified in the configuration hash is not supported, raise an exception
+        unless TYPE_TO_LOADER.key?(config["_type"])
+          raise ArgumentError, "Loading #{config["_type"]} prompt not supported"
+        end
+
+        # Load the prompt using the corresponding loader function from the `TYPE_TO_LOADER` hash
+        prompt_loader = TYPE_TO_LOADER[config["_type"]]
+        prompt_loader.call(config)
+      end
     end
   end
 end
data/lib/langchain/prompt.rb ADDED
@@ -0,0 +1,5 @@
+module Langchain
+  module Prompt
+    include Loading
+  end
+end
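
With `Langchain::Prompt` now including `Loading`, prompt templates can still be loaded straight from JSON or YAML files. A minimal sketch of that flow, assuming a local `joke_prompt.json` file and the `PromptTemplate#format` renderer (both illustrative, not part of this diff):

    require "json"
    require "langchain"

    # "_type" defaults to "prompt" when omitted; load_from_path dispatches on the extension.
    config = {
      "_type" => "prompt",
      "template" => "Tell me a {adjective} joke about {subject}.",
      "input_variables" => ["adjective", "subject"]
    }
    File.write("joke_prompt.json", JSON.pretty_generate(config))

    prompt = Langchain::Prompt.load_from_path(file_path: "joke_prompt.json")
    prompt.format(adjective: "dry", subject: "autoloading") # assumed PromptTemplate#format
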
data/lib/langchain/system_message.rb ADDED
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Langchain
+  class SystemMessage < Message
+    def type
+      "system"
+    end
+  end
+end
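
The files-changed list also adds `ai_message.rb` and `human_message.rb` (+9 lines each) alongside the new `Langchain::Message` base class (+35 lines). A sketch of what those sibling subclasses presumably look like, mirroring `SystemMessage` above; the exact `"human"`/`"ai"` type strings are assumptions, not taken from this diff:

    # frozen_string_literal: true

    # Illustrative only: likely shape of the sibling Message subclasses.
    module Langchain
      class HumanMessage < Message
        def type
          "human"
        end
      end

      class AIMessage < Message
        def type
          "ai"
        end
      end
    end
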
data/lib/langchain/tool/base.rb CHANGED
@@ -7,16 +7,18 @@ module Langchain::Tool
   #
   # == Available Tools
   #
-  # - {Langchain::Tool::Calculator}: Calculate the result of a math expression
-  # - {Langchain::Tool::RubyCodeInterpretor}: Runs ruby code
+  # - {Langchain::Tool::Calculator}: calculate the result of a math expression
+  # - {Langchain::Tool::Database}: executes SQL queries
   # - {Langchain::Tool::GoogleSearch}: search on Google (via SerpAPI)
+  # - {Langchain::Tool::RubyCodeInterpreter}: runs ruby code
+  # - {Langchain::Tool::Weather}: gets current weather data
   # - {Langchain::Tool::Wikipedia}: search on Wikipedia
   #
   # == Usage
   #
   # 1. Pick the tools you'd like to pass to an Agent and install the gems listed under **Gem Requirements**
   #
-  #    # To use all 3 tools:
+  #    # For example to use the Calculator, GoogleSearch, and Wikipedia:
   #    gem install eqn
   #    gem install google_search_results
   #    gem install wikipedia-client
@@ -28,16 +30,14 @@ module Langchain::Tool
   # 3. Pass the tools when Agent is instantiated.
   #
   #    agent = Langchain::Agent::ReActAgent.new(
-  #      llm: :openai, # or :cohere, :hugging_face, :google_palm or :replicate
-  #      llm_api_key: ENV["OPENAI_API_KEY"],
-  #      tools: ["google_search", "calculator", "wikipedia"]
+  #      llm: Langchain::LLM::OpenAI.new(api_key: "YOUR_API_KEY"), # or other like Cohere, Hugging Face, Google Palm or Replicate
+  #      tools: [
+  #        Langchain::Tool::GoogleSearch.new(api_key: "YOUR_API_KEY"),
+  #        Langchain::Tool::Calculator.new,
+  #        Langchain::Tool::Wikipedia.new
+  #      ]
   #    )
   #
-  # 4. Confirm that the Agent is using the Tools you passed in:
-  #
-  #    agent.tools
-  #    # => ["google_search", "calculator", "wikipedia"]
-  #
   # == Adding Tools
   #
   # 1. Create a new file in lib/langchain/tool/your_tool_name.rb
@@ -53,7 +53,7 @@ module Langchain::Tool
     #
     # @return [String] tool name
     #
-    def tool_name
+    def name
       self.class.const_get(:NAME)
     end
 
@@ -68,7 +68,7 @@ module Langchain::Tool
     #
     # @return [String] tool description
     #
-    def tool_description
+    def description
       self.class.const_get(:DESCRIPTION)
     end
 
@@ -109,7 +109,7 @@ module Langchain::Tool
     #
     def self.validate_tools!(tools:)
       # Check if the tool count is equal to unique tool count
-      if tools.count != tools.map(&:tool_name).uniq.count
+      if tools.count != tools.map(&:name).uniq.count
         raise ArgumentError, "Either tools are not unique or are conflicting with each other"
       end
     end
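
Since `tool_name` and `tool_description` are now simply `name` and `description`, a custom tool still only needs the two constants those readers resolve via `self.class.const_get`, plus an execution method. A rough sketch under those assumptions; the `Magic8Ball` class and its `execute(input:)` signature are illustrative, not part of this diff:

    # Hypothetical tool for illustration only.
    module Langchain::Tool
      class Magic8Ball < Base
        NAME = "magic_8_ball"
        DESCRIPTION = "Answers yes/no questions with a canned fortune"

        # Agents pass the tool a single input string.
        def execute(input:)
          ["It is certain", "Ask again later", "Very doubtful"].sample
        end
      end
    end

    tool = Langchain::Tool::Magic8Ball.new
    tool.name        # => "magic_8_ball"   (previously tool.tool_name)
    tool.description # => "Answers yes/no questions with a canned fortune"
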
data/lib/langchain/vectorsearch/milvus.rb CHANGED
@@ -32,7 +32,7 @@ module Langchain::Vectorsearch
           field: Array(texts)
         }, {
           field_name: "vectors",
-          type: ::Milvus::DATA_TYPES["binary_vector"],
+          type: ::Milvus::DATA_TYPES["float_vector"],
           field: Array(texts).map { |text| llm.embed(text: text) }
         }
       ]
@@ -47,7 +47,7 @@ module Langchain::Vectorsearch
       client.collections.create(
         auto_id: true,
         collection_name: index_name,
-        description: "Default schema created by Vectorsearch",
+        description: "Default schema created by langchain.rb",
         fields: [
           {
             name: "id",
@@ -66,7 +66,7 @@ module Langchain::Vectorsearch
             ]
           }, {
             name: "vectors",
-            data_type: ::Milvus::DATA_TYPES["binary_vector"],
+            data_type: ::Milvus::DATA_TYPES["float_vector"],
             is_primary_key: false,
             type_params: [
               {
@@ -79,6 +79,20 @@ module Langchain::Vectorsearch
       )
     end
 
+    # Create the default index
+    # @return [Boolean] The response from the server
+    def create_default_index
+      client.indices.create(
+        collection_name: "Documents",
+        field_name: "vectors",
+        extra_params: [
+          {key: "metric_type", value: "L2"},
+          {key: "index_type", value: "IVF_FLAT"},
+          {key: "params", value: "{\"nlist\":1024}"}
+        ]
+      )
+    end
+
     # Get the default schema
     # @return [Hash] The response from the server
     def get_default_schema
@@ -91,6 +105,12 @@ module Langchain::Vectorsearch
       client.collections.delete(collection_name: index_name)
     end
 
+    # Load default schema into memory
+    # @return [Boolean] The response from the server
+    def load_default_schema
+      client.collections.load(collection_name: index_name)
+    end
+
     def similarity_search(query:, k: 4)
       embedding = llm.embed(text: query)
 
@@ -101,15 +121,36 @@ module Langchain::Vectorsearch
     end
 
     def similarity_search_by_vector(embedding:, k: 4)
+      load_default_schema
+
       client.search(
         collection_name: index_name,
+        output_fields: ["id", "content", "vectors"],
         top_k: k.to_s,
         vectors: [embedding],
         dsl_type: 1,
         params: "{\"nprobe\": 10}",
-        anns_field: "content",
-        metric_type: "L2"
+        anns_field: "vectors",
+        metric_type: "L2",
+        vector_type: ::Milvus::DATA_TYPES["float_vector"]
       )
     end
+
+    # Ask a question and return the answer
+    # @param question [String] The question to ask
+    # @yield [String] Stream responses back one String at a time
+    # @return [String] The answer to the question
+    def ask(question:, &block)
+      search_results = similarity_search(query: question)
+
+      content_field = search_results.dig("results", "fields_data").select { |field| field.dig("field_name") == "content" }
+      content_data = content_field.first.dig("Field", "Scalars", "Data", "StringData", "data")
+
+      context = content_data.join("\n---\n")
+
+      prompt = generate_prompt(question: question, context: context)
+
+      llm.chat(prompt: prompt, &block)
+    end
   end
 end
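
Taken together, the new `create_default_index`, `load_default_schema`, and `ask` methods suggest a setup flow roughly like the sketch below. The constructor keywords and `add_texts` call mirror the other vectorsearch adapters and are assumptions here, as are the environment variables; note that `create_default_index` hard-codes the "Documents" collection name in this release:

    milvus = Langchain::Vectorsearch::Milvus.new(
      url: ENV["MILVUS_URL"],
      index_name: "Documents",
      llm: Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
    )

    milvus.create_default_schema   # collection with id/content/vectors fields
    milvus.create_default_index    # IVF_FLAT index with L2 metric (new in 0.6.12)
    milvus.add_texts(texts: ["Ruby was created by Matz.", "Zeitwerk autoloads code."])

    # similarity_search_by_vector now loads the collection into memory first.
    milvus.ask(question: "Who created Ruby?")
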
data/lib/langchain/vectorsearch/pgvector.rb CHANGED
@@ -8,7 +8,7 @@ module Langchain::Vectorsearch
   # Gem requirements: gem "pgvector", "~> 0.2"
   #
   # Usage:
-  #     pgvector = Langchain::Vectorsearch::Pgvector.new(url:, index_name:, llm:, namespace_column: nil, namespace: nil)
+  #     pgvector = Langchain::Vectorsearch::Pgvector.new(url:, index_name:, llm:, namespace: nil)
   #
 
   # The operators supported by the PostgreSQL vector search adapter
@@ -90,20 +90,22 @@ module Langchain::Vectorsearch
     end
 
     # Create default schema
-    # @return [PG::Result] The response from the database
     def create_default_schema
       db.run "CREATE EXTENSION IF NOT EXISTS vector"
-      namespace = namespace_column
+      namespace_column = @namespace_column
       vector_dimension = default_dimension
       db.create_table? table_name.to_sym do
         primary_key :id
         text :content
         column :vectors, "vector(#{vector_dimension})"
-        text namespace.to_sym, default: nil
+        text namespace_column.to_sym, default: nil
       end
     end
 
-    # TODO: Add destroy_default_schema method
+    # Destroy default schema
+    def destroy_default_schema
+      db.drop_table? table_name.to_sym
+    end
 
     # Search for similar texts in the index
     # @param query [String] The text to search for
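
With `destroy_default_schema` filling in the old TODO, a create/teardown round-trip (handy in specs) becomes possible. A small sketch using the constructor shown in the adapter's own usage comment; the connection URL, table name, and LLM key are illustrative:

    pgvector = Langchain::Vectorsearch::Pgvector.new(
      url: ENV["POSTGRES_URL"],
      index_name: "documents",
      llm: Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
    )

    pgvector.create_default_schema   # CREATE EXTENSION vector; table with a vectors column
    # ... add and query embeddings ...
    pgvector.destroy_default_schema  # drops the table again (new in 0.6.12)
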
data/lib/langchain/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Langchain
-  VERSION = "0.6.10"
+  VERSION = "0.6.12"
 end
data/lib/langchain.rb CHANGED
@@ -3,8 +3,25 @@
 require "logger"
 require "pathname"
 require "colorize"
-
-require_relative "./langchain/version"
+require "zeitwerk"
+loader = Zeitwerk::Loader.for_gem
+loader.ignore("#{__dir__}/langchainrb.rb")
+loader.inflector.inflect(
+  "ai_message" => "AIMessage",
+  "ai21" => "AI21",
+  "ai21_validator" => "AI21Validator",
+  "csv" => "CSV",
+  "html" => "HTML",
+  "json" => "JSON",
+  "jsonl" => "JSONL",
+  "llm" => "LLM",
+  "openai" => "OpenAI",
+  "openai_validator" => "OpenAIValidator",
+  "pdf" => "PDF",
+  "react_agent" => "ReActAgent",
+  "sql_query_agent" => "SQLQueryAgent"
+)
+loader.setup
 
 # Langchain.rb a is library for building LLM-backed Ruby applications. It is an abstraction layer that sits on top of the emerging AI-related tools that makes it easy for developers to consume and string those services together.
 #
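
The inflection overrides exist because Zeitwerk's default camelization would not produce the acronym-style constants the gem uses. A small sketch of the difference, for illustration only:

    require "zeitwerk"

    # Default camelization would resolve the wrong constant names:
    inflector = Zeitwerk::Inflector.new
    inflector.camelize("openai", nil)       # => "Openai"
    inflector.camelize("react_agent", nil)  # => "ReactAgent"

    # With the overrides above, lib/langchain/llm/openai.rb is expected to define
    # Langchain::LLM::OpenAI, and lib/langchain/agent/react_agent.rb to define
    # Langchain::Agent::ReActAgent.
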
@@ -48,13 +65,6 @@ require_relative "./langchain/version"
 #
 #     Langchain.logger.level = :info
 module Langchain
-  autoload :Loader, "langchain/loader"
-  autoload :Data, "langchain/data"
-  autoload :Conversation, "langchain/conversation"
-  autoload :ConversationMemory, "langchain/conversation_memory"
-  autoload :DependencyHelper, "langchain/dependency_helper"
-  autoload :ContextualLogger, "langchain/contextual_logger"
-
   class << self
     # @return [ContextualLogger]
     attr_reader :logger
@@ -73,95 +83,7 @@ module Langchain
 
   @root = Pathname.new(__dir__)
 
-  module Agent
-    autoload :Base, "langchain/agent/base"
-    autoload :ReActAgent, "langchain/agent/react_agent/react_agent.rb"
-    autoload :SQLQueryAgent, "langchain/agent/sql_query_agent/sql_query_agent.rb"
-  end
-
-  module Chunker
-    autoload :Base, "langchain/chunker/base"
-    autoload :Text, "langchain/chunker/text"
-    autoload :RecursiveText, "langchain/chunker/recursive_text"
-  end
-
-  module Tool
-    autoload :Base, "langchain/tool/base"
-    autoload :Calculator, "langchain/tool/calculator"
-    autoload :RubyCodeInterpreter, "langchain/tool/ruby_code_interpreter"
-    autoload :GoogleSearch, "langchain/tool/google_search"
-    autoload :Weather, "langchain/tool/weather"
-    autoload :Wikipedia, "langchain/tool/wikipedia"
-    autoload :Database, "langchain/tool/database"
-  end
-
-  module Processors
-    autoload :Base, "langchain/processors/base"
-    autoload :CSV, "langchain/processors/csv"
-    autoload :Docx, "langchain/processors/docx"
-    autoload :HTML, "langchain/processors/html"
-    autoload :JSON, "langchain/processors/json"
-    autoload :JSONL, "langchain/processors/jsonl"
-    autoload :PDF, "langchain/processors/pdf"
-    autoload :Text, "langchain/processors/text"
-    autoload :Xlsx, "langchain/processors/xlsx"
-  end
-
-  module Utils
-    module TokenLength
-      autoload :BaseValidator, "langchain/utils/token_length/base_validator"
-      autoload :AI21Validator, "langchain/utils/token_length/ai21_validator"
-      autoload :CohereValidator, "langchain/utils/token_length/cohere_validator"
-      autoload :GooglePalmValidator, "langchain/utils/token_length/google_palm_validator"
-      autoload :OpenAIValidator, "langchain/utils/token_length/openai_validator"
-      autoload :TokenLimitExceeded, "langchain/utils/token_length/token_limit_exceeded"
-    end
-  end
-
-  module Vectorsearch
-    autoload :Base, "langchain/vectorsearch/base"
-    autoload :Chroma, "langchain/vectorsearch/chroma"
-    autoload :Hnswlib, "langchain/vectorsearch/hnswlib"
-    autoload :Milvus, "langchain/vectorsearch/milvus"
-    autoload :Pinecone, "langchain/vectorsearch/pinecone"
-    autoload :Pgvector, "langchain/vectorsearch/pgvector"
-    autoload :Qdrant, "langchain/vectorsearch/qdrant"
-    autoload :Weaviate, "langchain/vectorsearch/weaviate"
-  end
-
-  module LLM
-    autoload :AI21, "langchain/llm/ai21"
-    autoload :Anthropic, "langchain/llm/anthropic"
-    autoload :Base, "langchain/llm/base"
-    autoload :Cohere, "langchain/llm/cohere"
-    autoload :GooglePalm, "langchain/llm/google_palm"
-    autoload :HuggingFace, "langchain/llm/hugging_face"
-    autoload :LlamaCpp, "langchain/llm/llama_cpp"
-    autoload :OpenAI, "langchain/llm/openai"
-    autoload :Replicate, "langchain/llm/replicate"
-  end
-
-  module Prompt
-    require_relative "langchain/prompt/loading"
-
-    autoload :Base, "langchain/prompt/base"
-    autoload :PromptTemplate, "langchain/prompt/prompt_template"
-    autoload :FewShotPromptTemplate, "langchain/prompt/few_shot_prompt_template"
-  end
-
-  module ActiveRecord
-    autoload :Hooks, "langchain/active_record/hooks"
-  end
-
-  module OutputParsers
-    autoload :Base, "langchain/output_parsers/base"
-    autoload :StructuredOutputParser, "langchain/output_parsers/structured"
-    autoload :OutputFixingParser, "langchain/output_parsers/fix"
-  end
-
   module Errors
     class BaseError < StandardError; end
   end
 end
-
-require "langchain/railtie" if defined?(Rails)