langchainrb 0.16.1 → 0.17.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,57 @@
1
+ module Langchain
2
+ class Assistant
3
+ module LLM
4
+ module Adapters
5
+ class Ollama < Base
6
+ def build_chat_params(tools:, instructions:, messages:, tool_choice:)
7
+ params = {messages: messages}
8
+ if tools.any?
9
+ params[:tools] = build_tools(tools)
10
+ end
11
+ params
12
+ end
13
+
14
+ def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
15
+ warn "Image URL is not supported by Ollama currently" if image_url
16
+
17
+ Langchain::Messages::OllamaMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
18
+ end
19
+
20
+ # Extract the tool call information from the Ollama tool call hash
21
+ #
22
+ # @param tool_call [Hash] The tool call hash
23
+ # @return [Array] The tool call information
24
+ def extract_tool_call_args(tool_call:)
25
+ tool_call_id = tool_call.dig("id")
26
+
27
+ function_name = tool_call.dig("function", "name")
28
+ tool_name, method_name = function_name.split("__")
29
+
30
+ tool_arguments = tool_call.dig("function", "arguments")
31
+ tool_arguments = if tool_arguments.is_a?(Hash)
32
+ Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
33
+ else
34
+ JSON.parse(tool_arguments, symbolize_names: true)
35
+ end
36
+
37
+ [tool_call_id, tool_name, method_name, tool_arguments]
38
+ end
39
+
40
+ def available_tool_names(tools)
41
+ build_tools(tools).map { |tool| tool.dig(:function, :name) }
42
+ end
43
+
44
+ def allowed_tool_choices
45
+ ["auto", "none"]
46
+ end
47
+
48
+ private
49
+
50
+ def build_tools(tools)
51
+ tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
52
+ end
53
+ end
54
+ end
55
+ end
56
+ end
57
+ end
@@ -0,0 +1,65 @@
1
+ module Langchain
2
+ class Assistant
3
+ module LLM
4
+ module Adapters
5
+ class OpenAI < Base
6
+ def build_chat_params(tools:, instructions:, messages:, tool_choice:)
7
+ params = {messages: messages}
8
+ if tools.any?
9
+ params[:tools] = build_tools(tools)
10
+ params[:tool_choice] = build_tool_choice(tool_choice)
11
+ end
12
+ params
13
+ end
14
+
15
+ def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
16
+ Langchain::Messages::OpenAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
17
+ end
18
+
19
+ # Extract the tool call information from the OpenAI tool call hash
20
+ #
21
+ # @param tool_call [Hash] The tool call hash
22
+ # @return [Array] The tool call information
23
+ def extract_tool_call_args(tool_call:)
24
+ tool_call_id = tool_call.dig("id")
25
+
26
+ function_name = tool_call.dig("function", "name")
27
+ tool_name, method_name = function_name.split("__")
28
+
29
+ tool_arguments = tool_call.dig("function", "arguments")
30
+ tool_arguments = if tool_arguments.is_a?(Hash)
31
+ Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
32
+ else
33
+ JSON.parse(tool_arguments, symbolize_names: true)
34
+ end
35
+
36
+ [tool_call_id, tool_name, method_name, tool_arguments]
37
+ end
38
+
39
+ def build_tools(tools)
40
+ tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
41
+ end
42
+
43
+ def allowed_tool_choices
44
+ ["auto", "none"]
45
+ end
46
+
47
+ def available_tool_names(tools)
48
+ build_tools(tools).map { |tool| tool.dig(:function, :name) }
49
+ end
50
+
51
+ private
52
+
53
+ def build_tool_choice(choice)
54
+ case choice
55
+ when "auto"
56
+ choice
57
+ else
58
+ {"type" => "function", "function" => {"name" => choice}}
59
+ end
60
+ end
61
+ end
62
+ end
63
+ end
64
+ end
65
+ end
@@ -3,7 +3,11 @@
3
3
  module Langchain
4
4
  module Messages
5
5
  class Base
6
- attr_reader :role, :content, :tool_calls, :tool_call_id
6
+ attr_reader :role,
7
+ :content,
8
+ :image_url,
9
+ :tool_calls,
10
+ :tool_call_id
7
11
 
8
12
  # Check if the message came from a user
9
13
  #
@@ -15,17 +15,20 @@ module Langchain
15
15
 
16
16
  # Initialize a new MistralAI message
17
17
  #
18
- # @param [String] The role of the message
19
- # @param [String] The content of the message
20
- # @param [Array<Hash>] The tool calls made in the message
21
- # @param [String] The ID of the tool call
22
- def initialize(role:, content: nil, tool_calls: [], tool_call_id: nil) # TODO: Implement image_file: reference (https://platform.openai.com/docs/api-reference/messages/object#messages/object-content)
18
+ # @param role [String] The role of the message
19
+ # @param content [String] The content of the message
20
+ # @param image_url [String] The URL of the image
21
+ # @param tool_calls [Array<Hash>] The tool calls made in the message
22
+ # @param tool_call_id [String] The ID of the tool call
23
+ def initialize(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil) # TODO: Implement image_file: reference (https://platform.openai.com/docs/api-reference/messages/object#messages/object-content)
23
24
  raise ArgumentError, "Role must be one of #{ROLES.join(", ")}" unless ROLES.include?(role)
24
25
  raise ArgumentError, "Tool calls must be an array of hashes" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }
25
26
 
26
27
  @role = role
27
28
  # Some Tools return content as a JSON hence `.to_s`
28
29
  @content = content.to_s
30
+ # Make sure you're using the Pixtral model if you want to send image_url
31
+ @image_url = image_url
29
32
  @tool_calls = tool_calls
30
33
  @tool_call_id = tool_call_id
31
34
  end
@@ -43,9 +46,28 @@ module Langchain
43
46
  def to_hash
44
47
  {}.tap do |h|
45
48
  h[:role] = role
46
- h[:content] = content if content # Content is nil for tool calls
47
- h[:tool_calls] = tool_calls if tool_calls.any?
48
- h[:tool_call_id] = tool_call_id if tool_call_id
49
+
50
+ if tool_calls.any?
51
+ h[:tool_calls] = tool_calls
52
+ else
53
+ h[:tool_call_id] = tool_call_id if tool_call_id
54
+
55
+ h[:content] = []
56
+
57
+ if content && !content.empty?
58
+ h[:content] << {
59
+ type: "text",
60
+ text: content
61
+ }
62
+ end
63
+
64
+ if image_url
65
+ h[:content] << {
66
+ type: "image_url",
67
+ image_url: image_url
68
+ }
69
+ end
70
+ end
49
71
  end
50
72
  end
51
73
 
@@ -15,17 +15,25 @@ module Langchain
15
15
 
16
16
  # Initialize a new OpenAI message
17
17
  #
18
- # @param [String] The role of the message
19
- # @param [String] The content of the message
20
- # @param [Array<Hash>] The tool calls made in the message
21
- # @param [String] The ID of the tool call
22
- def initialize(role:, content: nil, tool_calls: [], tool_call_id: nil) # TODO: Implement image_file: reference (https://platform.openai.com/docs/api-reference/messages/object#messages/object-content)
18
+ # @param role [String] The role of the message
19
+ # @param content [String] The content of the message
20
+ # @param image_url [String] The URL of the image
21
+ # @param tool_calls [Array<Hash>] The tool calls made in the message
22
+ # @param tool_call_id [String] The ID of the tool call
23
+ def initialize(
24
+ role:,
25
+ content: nil,
26
+ image_url: nil,
27
+ tool_calls: [],
28
+ tool_call_id: nil
29
+ )
23
30
  raise ArgumentError, "Role must be one of #{ROLES.join(", ")}" unless ROLES.include?(role)
24
31
  raise ArgumentError, "Tool calls must be an array of hashes" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }
25
32
 
26
33
  @role = role
27
34
  # Some Tools return content as a JSON hence `.to_s`
28
35
  @content = content.to_s
36
+ @image_url = image_url
29
37
  @tool_calls = tool_calls
30
38
  @tool_call_id = tool_call_id
31
39
  end
@@ -43,9 +51,30 @@ module Langchain
43
51
  def to_hash
44
52
  {}.tap do |h|
45
53
  h[:role] = role
46
- h[:content] = content if content # Content is nil for tool calls
47
- h[:tool_calls] = tool_calls if tool_calls.any?
48
- h[:tool_call_id] = tool_call_id if tool_call_id
54
+
55
+ if tool_calls.any?
56
+ h[:tool_calls] = tool_calls
57
+ else
58
+ h[:tool_call_id] = tool_call_id if tool_call_id
59
+
60
+ h[:content] = []
61
+
62
+ if content && !content.empty?
63
+ h[:content] << {
64
+ type: "text",
65
+ text: content
66
+ }
67
+ end
68
+
69
+ if image_url
70
+ h[:content] << {
71
+ type: "image_url",
72
+ image_url: {
73
+ url: image_url
74
+ }
75
+ }
76
+ end
77
+ end
49
78
  end
50
79
  end
51
80
 
@@ -14,7 +14,7 @@ module Langchain::LLM
14
14
  DEFAULTS = {
15
15
  temperature: 0.0,
16
16
  completion_model_name: "claude-2.1",
17
- chat_completion_model_name: "claude-3-sonnet-20240229",
17
+ chat_completion_model_name: "claude-3-5-sonnet-20240620",
18
18
  max_tokens_to_sample: 256
19
19
  }.freeze
20
20
 
@@ -109,6 +109,7 @@ module Langchain::LLM
109
109
  raise ArgumentError.new("model argument is required") if parameters[:model].empty?
110
110
  raise ArgumentError.new("max_tokens argument is required") if parameters[:max_tokens].nil?
111
111
 
112
+ binding.pry
112
113
  response = client.messages(parameters: parameters)
113
114
 
114
115
  Langchain::LLM::AnthropicResponse.new(response)
@@ -11,9 +11,8 @@ module Langchain::LLM
11
11
  # - {Langchain::LLM::Anthropic}
12
12
  # - {Langchain::LLM::Azure}
13
13
  # - {Langchain::LLM::Cohere}
14
- # - {Langchain::LLM::GooglePalm}
15
- # - {Langchain::LLM::GoogleVertexAI}
16
14
  # - {Langchain::LLM::GoogleGemini}
15
+ # - {Langchain::LLM::GoogleVertexAI}
17
16
  # - {Langchain::LLM::HuggingFace}
18
17
  # - {Langchain::LLM::LlamaCpp}
19
18
  # - {Langchain::LLM::OpenAI}
@@ -122,7 +122,7 @@ module Langchain::Tool
122
122
  end
123
123
  db.foreign_key_list(table).each do |fk|
124
124
  schema << ",\n" if fk == db.foreign_key_list(table).first
125
- schema << "FOREIGN KEY (#{fk[:columns][0]}) REFERENCES #{fk[:table]}(#{fk[:key][0]})"
125
+ schema << "FOREIGN KEY (#{fk[:columns]&.first}) REFERENCES #{fk[:table]}(#{fk[:key]&.first})"
126
126
  schema << ",\n" unless fk == db.foreign_key_list(table).last
127
127
  end
128
128
  schema << ");\n"
@@ -37,7 +37,7 @@ module Langchain::Vectorsearch
37
37
  @options = {
38
38
  url: url,
39
39
  request_timeout: 20,
40
- log: false
40
+ logger: Langchain.logger
41
41
  }.merge(es_options)
42
42
 
43
43
  @es_client = ::Elasticsearch::Client.new(**options)
@@ -6,16 +6,18 @@ module Langchain::Vectorsearch
6
6
  # Wrapper around Milvus REST APIs.
7
7
  #
8
8
  # Gem requirements:
9
- # gem "milvus", "~> 0.9.3"
9
+ # gem "milvus", "~> 0.10.3"
10
10
  #
11
11
  # Usage:
12
- # milvus = Langchain::Vectorsearch::Milvus.new(url:, index_name:, llm:, api_key:)
12
+ # milvus = Langchain::Vectorsearch::Milvus.new(url:, index_name:, llm:, api_key:)
13
13
  #
14
-
15
14
  def initialize(url:, index_name:, llm:, api_key: nil)
16
15
  depends_on "milvus"
17
16
 
18
- @client = ::Milvus::Client.new(url: url)
17
+ @client = ::Milvus::Client.new(
18
+ url: url,
19
+ logger: Langchain.logger
20
+ )
19
21
  @index_name = index_name
20
22
 
21
23
  super(llm: llm)
@@ -24,33 +26,24 @@ module Langchain::Vectorsearch
24
26
  def add_texts(texts:)
25
27
  client.entities.insert(
26
28
  collection_name: index_name,
27
- num_rows: Array(texts).size,
28
- fields_data: [
29
- {
30
- field_name: "content",
31
- type: ::Milvus::DATA_TYPES["varchar"],
32
- field: Array(texts)
33
- }, {
34
- field_name: "vectors",
35
- type: ::Milvus::DATA_TYPES["float_vector"],
36
- field: Array(texts).map { |text| llm.embed(text: text).embedding }
37
- }
38
- ]
29
+ data: texts.map do |text|
30
+ {content: text, vector: llm.embed(text: text).embedding}
31
+ end
39
32
  )
40
33
  end
41
34
 
35
+ # TODO: Add update_texts method
36
+
42
37
  # Deletes a list of texts in the index
43
38
  #
44
39
  # @param ids [Array<Integer>] The ids of texts to delete
45
40
  # @return [Boolean] The response from the server
46
41
  def remove_texts(ids:)
47
42
  raise ArgumentError, "ids must be an array" unless ids.is_a?(Array)
48
- # Convert ids to integers if strings are passed
49
- ids = ids.map(&:to_i)
50
43
 
51
44
  client.entities.delete(
52
45
  collection_name: index_name,
53
- expression: "id in #{ids}"
46
+ filter: "id in #{ids}"
54
47
  )
55
48
  end
56
49
 
@@ -62,33 +55,25 @@ module Langchain::Vectorsearch
62
55
  client.collections.create(
63
56
  auto_id: true,
64
57
  collection_name: index_name,
65
- description: "Default schema created by langchain.rb",
66
58
  fields: [
67
59
  {
68
- name: "id",
69
- is_primary_key: true,
70
- autoID: true,
71
- data_type: ::Milvus::DATA_TYPES["int64"]
60
+ fieldName: "id",
61
+ isPrimary: true,
62
+ dataType: "Int64"
72
63
  }, {
73
- name: "content",
74
- is_primary_key: false,
75
- data_type: ::Milvus::DATA_TYPES["varchar"],
76
- type_params: [
77
- {
78
- key: "max_length",
79
- value: "32768" # Largest allowed value
80
- }
81
- ]
64
+ fieldName: "content",
65
+ isPrimary: false,
66
+ dataType: "VarChar",
67
+ elementTypeParams: {
68
+ max_length: "32768" # Largest allowed value
69
+ }
82
70
  }, {
83
- name: "vectors",
84
- data_type: ::Milvus::DATA_TYPES["float_vector"],
85
- is_primary_key: false,
86
- type_params: [
87
- {
88
- key: "dim",
89
- value: llm.default_dimensions.to_s
90
- }
91
- ]
71
+ fieldName: "vector",
72
+ isPrimary: false,
73
+ dataType: "FloatVector",
74
+ elementTypeParams: {
75
+ dim: llm.default_dimensions.to_s
76
+ }
92
77
  }
93
78
  ]
94
79
  )
@@ -97,13 +82,17 @@ module Langchain::Vectorsearch
97
82
  # Create the default index
98
83
  # @return [Boolean] The response from the server
99
84
  def create_default_index
100
- client.indices.create(
85
+ client.indexes.create(
101
86
  collection_name: index_name,
102
- field_name: "vectors",
103
- extra_params: [
104
- {key: "metric_type", value: "L2"},
105
- {key: "index_type", value: "IVF_FLAT"},
106
- {key: "params", value: "{\"nlist\":1024}"}
87
+ index_params: [
88
+ {
89
+ metricType: "L2",
90
+ fieldName: "vector",
91
+ indexName: "vector_idx",
92
+ indexConfig: {
93
+ index_type: "AUTOINDEX"
94
+ }
95
+ }
107
96
  ]
108
97
  )
109
98
  end
@@ -111,13 +100,13 @@ module Langchain::Vectorsearch
111
100
  # Get the default schema
112
101
  # @return [Hash] The response from the server
113
102
  def get_default_schema
114
- client.collections.get(collection_name: index_name)
103
+ client.collections.describe(collection_name: index_name)
115
104
  end
116
105
 
117
106
  # Delete default schema
118
107
  # @return [Hash] The response from the server
119
108
  def destroy_default_schema
120
- client.collections.delete(collection_name: index_name)
109
+ client.collections.drop(collection_name: index_name)
121
110
  end
122
111
 
123
112
  # Load default schema into memory
@@ -138,16 +127,12 @@ module Langchain::Vectorsearch
138
127
  def similarity_search_by_vector(embedding:, k: 4)
139
128
  load_default_schema
140
129
 
141
- client.search(
130
+ client.entities.search(
142
131
  collection_name: index_name,
143
- output_fields: ["id", "content"], # Add "vectors" if need to have full vectors returned.
144
- top_k: k.to_s,
145
- vectors: [embedding],
146
- dsl_type: 1,
147
- params: "{\"nprobe\": 10}",
148
- anns_field: "vectors",
149
- metric_type: "L2",
150
- vector_type: ::Milvus::DATA_TYPES["float_vector"]
132
+ anns_field: "vector",
133
+ data: [embedding],
134
+ limit: k,
135
+ output_fields: ["content", "id", "vector"]
151
136
  )
152
137
  end
153
138
 
@@ -159,8 +144,7 @@ module Langchain::Vectorsearch
159
144
  def ask(question:, k: 4, &block)
160
145
  search_results = similarity_search(query: question, k: k)
161
146
 
162
- content_field = search_results.dig("results", "fields_data").select { |field| field.dig("field_name") == "content" }
163
- content_data = content_field.first.dig("Field", "Scalars", "Data", "StringData", "data")
147
+ content_data = search_results.dig("data").map { |result| result.dig("content") }
164
148
 
165
149
  context = content_data.join("\n---\n")
166
150
 
@@ -6,7 +6,7 @@ module Langchain::Vectorsearch
6
6
  # Wrapper around Qdrant
7
7
  #
8
8
  # Gem requirements:
9
- # gem "qdrant-ruby", "~> 0.9.3"
9
+ # gem "qdrant-ruby", "~> 0.9.8"
10
10
  #
11
11
  # Usage:
12
12
  # qdrant = Langchain::Vectorsearch::Qdrant.new(url:, api_key:, index_name:, llm:)
@@ -22,7 +22,8 @@ module Langchain::Vectorsearch
22
22
 
23
23
  @client = ::Qdrant::Client.new(
24
24
  url: url,
25
- api_key: api_key
25
+ api_key: api_key,
26
+ logger: Langchain.logger
26
27
  )
27
28
  @index_name = index_name
28
29
 
@@ -6,7 +6,7 @@ module Langchain::Vectorsearch
6
6
  # Wrapper around Weaviate
7
7
  #
8
8
  # Gem requirements:
9
- # gem "weaviate-ruby", "~> 0.9.0"
9
+ # gem "weaviate-ruby", "~> 0.9.2"
10
10
  #
11
11
  # Usage:
12
12
  # weaviate = Langchain::Vectorsearch::Weaviate.new(url: ENV["WEAVIATE_URL"], api_key: ENV["WEAVIATE_API_KEY"], index_name: "Docs", llm: llm)
@@ -22,7 +22,8 @@ module Langchain::Vectorsearch
22
22
 
23
23
  @client = ::Weaviate::Client.new(
24
24
  url: url,
25
- api_key: api_key
25
+ api_key: api_key,
26
+ logger: Langchain.logger
26
27
  )
27
28
 
28
29
  # Weaviate requires the class name to be Capitalized: https://weaviate.io/developers/weaviate/configuration/schema-configuration#create-a-class
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Langchain
4
- VERSION = "0.16.1"
4
+ VERSION = "0.17.1"
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: langchainrb
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.16.1
4
+ version: 0.17.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Andrei Bondarev
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2024-09-30 00:00:00.000000000 Z
11
+ date: 2024-10-07 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: baran
@@ -318,20 +318,6 @@ dependencies:
318
318
  - - ">="
319
319
  - !ruby/object:Gem::Version
320
320
  version: '0'
321
- - !ruby/object:Gem::Dependency
322
- name: google_palm_api
323
- requirement: !ruby/object:Gem::Requirement
324
- requirements:
325
- - - "~>"
326
- - !ruby/object:Gem::Version
327
- version: 0.1.3
328
- type: :development
329
- prerelease: false
330
- version_requirements: !ruby/object:Gem::Requirement
331
- requirements:
332
- - - "~>"
333
- - !ruby/object:Gem::Version
334
- version: 0.1.3
335
321
  - !ruby/object:Gem::Dependency
336
322
  name: google_search_results
337
323
  requirement: !ruby/object:Gem::Requirement
@@ -380,14 +366,14 @@ dependencies:
380
366
  requirements:
381
367
  - - "~>"
382
368
  - !ruby/object:Gem::Version
383
- version: 0.9.3
369
+ version: 0.10.3
384
370
  type: :development
385
371
  prerelease: false
386
372
  version_requirements: !ruby/object:Gem::Requirement
387
373
  requirements:
388
374
  - - "~>"
389
375
  - !ruby/object:Gem::Version
390
- version: 0.9.3
376
+ version: 0.10.3
391
377
  - !ruby/object:Gem::Dependency
392
378
  name: llama_cpp
393
379
  requirement: !ruby/object:Gem::Requirement
@@ -520,14 +506,14 @@ dependencies:
520
506
  requirements:
521
507
  - - "~>"
522
508
  - !ruby/object:Gem::Version
523
- version: 0.9.4
509
+ version: 0.9.8
524
510
  type: :development
525
511
  prerelease: false
526
512
  version_requirements: !ruby/object:Gem::Requirement
527
513
  requirements:
528
514
  - - "~>"
529
515
  - !ruby/object:Gem::Version
530
- version: 0.9.4
516
+ version: 0.9.8
531
517
  - !ruby/object:Gem::Dependency
532
518
  name: roo
533
519
  requirement: !ruby/object:Gem::Requirement
@@ -604,14 +590,14 @@ dependencies:
604
590
  requirements:
605
591
  - - "~>"
606
592
  - !ruby/object:Gem::Version
607
- version: 0.8.10
593
+ version: 0.9.2
608
594
  type: :development
609
595
  prerelease: false
610
596
  version_requirements: !ruby/object:Gem::Requirement
611
597
  requirements:
612
598
  - - "~>"
613
599
  - !ruby/object:Gem::Version
614
- version: 0.8.10
600
+ version: 0.9.2
615
601
  - !ruby/object:Gem::Dependency
616
602
  name: wikipedia-client
617
603
  requirement: !ruby/object:Gem::Requirement
@@ -652,6 +638,13 @@ files:
652
638
  - README.md
653
639
  - lib/langchain.rb
654
640
  - lib/langchain/assistants/assistant.rb
641
+ - lib/langchain/assistants/llm/adapter.rb
642
+ - lib/langchain/assistants/llm/adapters/_base.rb
643
+ - lib/langchain/assistants/llm/adapters/anthropic.rb
644
+ - lib/langchain/assistants/llm/adapters/google_gemini.rb
645
+ - lib/langchain/assistants/llm/adapters/mistral_ai.rb
646
+ - lib/langchain/assistants/llm/adapters/ollama.rb
647
+ - lib/langchain/assistants/llm/adapters/openai.rb
655
648
  - lib/langchain/assistants/messages/anthropic_message.rb
656
649
  - lib/langchain/assistants/messages/base.rb
657
650
  - lib/langchain/assistants/messages/google_gemini_message.rb
@@ -683,7 +676,6 @@ files:
683
676
  - lib/langchain/llm/base.rb
684
677
  - lib/langchain/llm/cohere.rb
685
678
  - lib/langchain/llm/google_gemini.rb
686
- - lib/langchain/llm/google_palm.rb
687
679
  - lib/langchain/llm/google_vertex_ai.rb
688
680
  - lib/langchain/llm/hugging_face.rb
689
681
  - lib/langchain/llm/llama_cpp.rb
@@ -701,7 +693,6 @@ files:
701
693
  - lib/langchain/llm/response/base_response.rb
702
694
  - lib/langchain/llm/response/cohere_response.rb
703
695
  - lib/langchain/llm/response/google_gemini_response.rb
704
- - lib/langchain/llm/response/google_palm_response.rb
705
696
  - lib/langchain/llm/response/hugging_face_response.rb
706
697
  - lib/langchain/llm/response/llama_cpp_response.rb
707
698
  - lib/langchain/llm/response/mistral_ai_response.rb