langchainrb 0.17.1 → 0.18.0

Files changed (38)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/README.md +5 -0
  4. data/lib/langchain/{assistants → assistant}/llm/adapter.rb +1 -1
  5. data/lib/langchain/assistant/llm/adapters/anthropic.rb +105 -0
  6. data/lib/langchain/assistant/llm/adapters/base.rb +63 -0
  7. data/lib/langchain/{assistants → assistant}/llm/adapters/google_gemini.rb +43 -3
  8. data/lib/langchain/{assistants → assistant}/llm/adapters/mistral_ai.rb +39 -2
  9. data/lib/langchain/assistant/llm/adapters/ollama.rb +94 -0
  10. data/lib/langchain/{assistants → assistant}/llm/adapters/openai.rb +38 -2
  11. data/lib/langchain/assistant/messages/anthropic_message.rb +77 -0
  12. data/lib/langchain/assistant/messages/base.rb +56 -0
  13. data/lib/langchain/assistant/messages/google_gemini_message.rb +92 -0
  14. data/lib/langchain/assistant/messages/mistral_ai_message.rb +98 -0
  15. data/lib/langchain/assistant/messages/ollama_message.rb +76 -0
  16. data/lib/langchain/assistant/messages/openai_message.rb +105 -0
  17. data/lib/langchain/{assistants/assistant.rb → assistant.rb} +26 -49
  18. data/lib/langchain/llm/ai21.rb +1 -1
  19. data/lib/langchain/llm/anthropic.rb +59 -4
  20. data/lib/langchain/llm/aws_bedrock.rb +6 -7
  21. data/lib/langchain/llm/azure.rb +1 -1
  22. data/lib/langchain/llm/hugging_face.rb +1 -1
  23. data/lib/langchain/llm/ollama.rb +0 -1
  24. data/lib/langchain/llm/openai.rb +2 -2
  25. data/lib/langchain/llm/parameters/chat.rb +1 -0
  26. data/lib/langchain/llm/replicate.rb +2 -10
  27. data/lib/langchain/version.rb +1 -1
  28. data/lib/langchain.rb +1 -14
  29. metadata +16 -16
  30. data/lib/langchain/assistants/llm/adapters/_base.rb +0 -21
  31. data/lib/langchain/assistants/llm/adapters/anthropic.rb +0 -62
  32. data/lib/langchain/assistants/llm/adapters/ollama.rb +0 -57
  33. data/lib/langchain/assistants/messages/anthropic_message.rb +0 -75
  34. data/lib/langchain/assistants/messages/base.rb +0 -54
  35. data/lib/langchain/assistants/messages/google_gemini_message.rb +0 -90
  36. data/lib/langchain/assistants/messages/mistral_ai_message.rb +0 -96
  37. data/lib/langchain/assistants/messages/ollama_message.rb +0 -74
  38. data/lib/langchain/assistants/messages/openai_message.rb +0 -103
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f7061ef2090d35626239ca575b60edb291dbbadab7de85a5a2796792e1691437
-  data.tar.gz: 30cb1f14b602a22e7df8f2dba42660383d44482cbe83fb35dc9539afa836739c
+  metadata.gz: f049086ef6cbb25483d360705c216583d30ca17a88e1a286730a00cbc95fa9fd
+  data.tar.gz: '07592ccbf768d32122e98fc2cd79eb17547aa004591a9e0d9af3406bb782523d'
 SHA512:
-  metadata.gz: dd08fb29bd0ff9237cc27980c3bac607baeb9d54a93f297b1e81fb863b7cbb9720db4adacb3dae92bcbe71d2eb59b38d4de0ee321face467a0b82bde627d2929
-  data.tar.gz: 4a3661afd2d9d75a64e02f6f173cd0bf0e016207c444ca4506bab907f00dc906f5bbf82c96aad9521280265f214b0f3e82dd5e4ee54dc40f3afb415a6f50b365
+  metadata.gz: 32b1bd7ab1d86c3615ba1019476011031eb25429a2ff7d4ad0b3efb65ea04967fac668f9c92ea5a5f9e641ebf0b3bb638ec99a1883b6ef7ba8ddf807245c8ca4
+  data.tar.gz: 67be74a35f8a4cc4a5ddd98f40f2e16d179f3127015ec8895f7764e0a1adafa2ac6a02c8493addd0468c04ee4c944cf08fbb3952512a87a05a5d368240d9d924
data/CHANGELOG.md CHANGED
@@ -1,5 +1,15 @@
 ## [Unreleased]
 
+## [0.18.0] - 2024-10-12
+- [BREAKING] Remove the `Langchain::Assistant#clear_thread!` method
+- [BREAKING] The `Langchain::Messages::*` namespace has migrated to `Langchain::Assistant::Messages::*`
+- [BREAKING] Modify the `Langchain::LLM::AwsBedrock` constructor to pass model options via `default_options: {...}`
+- Introduce the `Langchain::Assistant#parallel_tool_calls` option, which controls whether the LLM may make multiple parallel tool calls. Default: true
+- Minor improvements to the `Langchain::Assistant` class
+- Add support for streaming with Anthropic
+- Bump the anthropic gem
+- The default `Langchain::LLM::Anthropic` chat model is now "claude-3-5-sonnet-20240620"
+
 ## [0.17.1] - 2024-10-07
 - Move Langchain::Assistant::LLM::Adapter-related classes to separate files
 - Fix Langchain::Tool::Database#describe_table method
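
The namespace migration and the new Assistant option are easiest to see in use. A minimal sketch, assuming `llm` is an already-built LLM instance (the `chat_model` key in the AwsBedrock comment is an illustrative assumption, not a documented option name):

  # Namespace migration: Langchain::Messages::* -> Langchain::Assistant::Messages::*
  msg = Langchain::Assistant::Messages::OpenAIMessage.new(role: "user", content: "Hello")

  # New Assistant option (default: true)
  assistant = Langchain::Assistant.new(
    llm: llm,
    instructions: "You are a helpful assistant.",
    parallel_tool_calls: false # ask the LLM for at most one tool call per turn
  )

  # [BREAKING] AwsBedrock model options now go under `default_options:`, e.g.:
  # llm = Langchain::LLM::AwsBedrock.new(default_options: {chat_model: "..."})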
data/README.md CHANGED
@@ -132,6 +132,8 @@ Use the `chat` method to generate chat completions:
 messages = [
   { role: "system", content: "You are a helpful assistant." },
   { role: "user", content: "What's the weather like today?" }
+  # Google Gemini and Google VertexAI expect messages in a different format:
+  # { role: "user", parts: [{ text: "why is the sky blue?" }] }
 ]
 response = llm.chat(messages: messages)
 chat_completion = response.chat_completion
@@ -525,11 +527,14 @@ assistant.add_message(content: "Hello")
 assistant.run(auto_tool_execution: true)
 ```
 
+Note that streaming is not currently supported by all LLMs.
+
 ### Configuration
 * `llm`: The LLM instance to use (required)
 * `tools`: An array of tool instances (optional)
 * `instructions`: System instructions for the assistant (optional)
 * `tool_choice`: Specifies how tools should be selected. Default: "auto". A specific tool function name can be passed; this forces the Assistant to **always** use that function.
+* `parallel_tool_calls`: Whether to allow the LLM to make multiple parallel tool calls. Default: true
 * `add_message_callback`: A callback (proc or lambda) that is called whenever a message is added to the conversation (optional)
 
 ### Key Methods
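
For contrast, a short sketch of the two message shapes the README note refers to, assuming `llm` is a Google Gemini LLM instance (the question text is illustrative):

  # OpenAI-style message (most providers)
  openai_style = { role: "user", content: "Why is the sky blue?" }

  # Google Gemini / VertexAI-style message: text lives under `parts`
  gemini_style = { role: "user", parts: [{ text: "Why is the sky blue?" }] }

  response = llm.chat(messages: [gemini_style])
  puts response.chat_completion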
data/lib/langchain/{assistants → assistant}/llm/adapter.rb RENAMED
@@ -1,4 +1,4 @@
-Dir[Pathname.new(__FILE__).dirname.join("adapters", "*.rb")].sort.each { |file| require file }
+# frozen_string_literal: true
 
 module Langchain
   class Assistant
data/lib/langchain/assistant/llm/adapters/anthropic.rb ADDED
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Langchain
+  class Assistant
+    module LLM
+      module Adapters
+        class Anthropic < Base
+          # Build the chat parameters for the Anthropic API
+          #
+          # @param messages [Array<Hash>] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array<Hash>] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
+            params = {messages: messages}
+            if tools.any?
+              params[:tools] = build_tools(tools)
+              params[:tool_choice] = build_tool_choice(tool_choice, parallel_tool_calls)
+            end
+            params[:system] = instructions if instructions
+            params
+          end
+
+          # Build an Anthropic message
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array<Hash>] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::AnthropicMessage] The Anthropic message
+          def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
+            Langchain.logger.warn "WARNING: Image URL is not supported by Anthropic currently" if image_url
+
+            Messages::AnthropicMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
+          end
+
+          # Extract the tool call information from the Anthropic tool call hash
+          #
+          # @param tool_call [Hash] The tool call hash, format: {"type"=>"tool_use", "id"=>"toolu_01TjusbFApEbwKPRWTRwzadR", "name"=>"news_retriever__get_top_headlines", "input"=>{"country"=>"us", "page_size"=>10}}
+          # @return [Array] The tool call information: [tool_call_id, tool_name, method_name, tool_arguments]
+          def extract_tool_call_args(tool_call:)
+            tool_call_id = tool_call.dig("id")
+            function_name = tool_call.dig("name")
+            tool_name, method_name = function_name.split("__")
+            tool_arguments = tool_call.dig("input").transform_keys(&:to_sym)
+            [tool_call_id, tool_name, method_name, tool_arguments]
+          end
+
+          # Build the tools for the Anthropic API
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_anthropic_format }.flatten
+          end
+
+          # Get the allowed assistant.tool_choice values for Anthropic
+          def allowed_tool_choices
+            ["auto", "any"]
+          end
+
+          # Get the available tool function names for Anthropic
+          #
+          # @param tools [Array<Hash>] The tools
+          # @return [Array<String>] The tool function names
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:name) }
+          end
+
+          def tool_role
+            Messages::AnthropicMessage::TOOL_ROLE
+          end
+
+          def support_system_message?
+            Messages::AnthropicMessage::ROLES.include?("system")
+          end
+
+          private
+
+          def build_tool_choice(choice, parallel_tool_calls)
+            tool_choice_object = {disable_parallel_tool_use: !parallel_tool_calls}
+
+            case choice
+            when "auto"
+              tool_choice_object[:type] = "auto"
+            when "any"
+              tool_choice_object[:type] = "any"
+            else
+              tool_choice_object[:type] = "tool"
+              tool_choice_object[:name] = choice
+            end
+
+            tool_choice_object
+          end
+        end
+      end
+    end
+  end
+end
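
To make the Anthropic `tool_choice` mapping concrete, a sketch of the params the adapter produces, based on the code above (message text and instructions are illustrative):

  adapter = Langchain::Assistant::LLM::Adapters::Anthropic.new

  params = adapter.build_chat_params(
    messages: [{role: "user", content: "Top headlines, please"}],
    instructions: "Be terse.",
    tools: [],            # with no tools, only :messages and :system are set
    tool_choice: "auto",
    parallel_tool_calls: false
  )
  # => {messages: [...], system: "Be terse."}

  # With tools present, build_tool_choice("auto", false) yields:
  #   {disable_parallel_tool_use: true, type: "auto"}
  # and a specific function name, e.g. "news_retriever__get_top_headlines", yields:
  #   {disable_parallel_tool_use: true, type: "tool", name: "news_retriever__get_top_headlines"}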
data/lib/langchain/assistant/llm/adapters/base.rb ADDED
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module Langchain
+  class Assistant
+    module LLM
+      module Adapters
+        class Base
+          # Build the chat parameters for the LLM
+          #
+          # @param messages [Array] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
+            raise NotImplementedError, "Subclasses must implement build_chat_params"
+          end
+
+          # Extract the tool call information from the tool call hash
+          #
+          # @param tool_call [Hash] The tool call hash
+          # @return [Array] The tool call information
+          def extract_tool_call_args(tool_call:)
+            raise NotImplementedError, "Subclasses must implement extract_tool_call_args"
+          end
+
+          # Build a message for the LLM
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::Base] The message
+          def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
+            raise NotImplementedError, "Subclasses must implement build_message"
+          end
+
+          # Does this adapter accept messages with role="system"?
+          #
+          # @return [Boolean] Whether the adapter supports system messages
+          def support_system_message?
+            raise NotImplementedError, "Subclasses must implement support_system_message?"
+          end
+
+          # Role name used to return the tool output
+          #
+          # @return [String] The tool role
+          def tool_role
+            raise NotImplementedError, "Subclasses must implement tool_role"
+          end
+        end
+      end
+    end
+  end
+end
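
The abstract base makes the adapter contract explicit. A hypothetical minimal subclass (the provider name "AcmeAI" is invented for illustration, and reusing `Messages::OpenAIMessage` and the "tool" role is an assumption that only holds for OpenAI-compatible APIs):

  module Langchain
    class Assistant
      module LLM
        module Adapters
          # Hypothetical adapter for a provider speaking the OpenAI wire format.
          class AcmeAI < Base
            def build_chat_params(messages:, instructions:, tools:, tool_choice:, parallel_tool_calls:)
              params = {messages: messages}
              params[:tools] = tools.map { |t| t.class.function_schemas.to_openai_format }.flatten if tools.any?
              params
            end

            def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
              Messages::OpenAIMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
            end

            def extract_tool_call_args(tool_call:)
              tool_name, method_name = tool_call.dig("function", "name").split("__")
              args = JSON.parse(tool_call.dig("function", "arguments"), symbolize_names: true)
              [tool_call.dig("id"), tool_name, method_name, args]
            end

            def support_system_message?
              true
            end

            def tool_role
              "tool"
            end
          end
        end
      end
    end
  end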
data/lib/langchain/{assistants → assistant}/llm/adapters/google_gemini.rb RENAMED
@@ -1,9 +1,27 @@
+# frozen_string_literal: true
+
 module Langchain
   class Assistant
     module LLM
       module Adapters
         class GoogleGemini < Base
-          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
+          # Build the chat parameters for the Google Gemini LLM
+          #
+          # @param messages [Array] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
+            Langchain.logger.warn "WARNING: `parallel_tool_calls:` is not supported by Google Gemini currently"
+
             params = {messages: messages}
             if tools.any?
               params[:tools] = build_tools(tools)
@@ -13,10 +31,18 @@ module Langchain
             params
           end
 
+          # Build a Google Gemini message
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::GoogleGeminiMessage] The Google Gemini message
           def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
-            warn "Image URL is not supported by Google Gemini" if image_url
+            Langchain.logger.warn "Image URL is not supported by Google Gemini" if image_url
 
-            Langchain::Messages::GoogleGeminiMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
+            Messages::GoogleGeminiMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
           end
 
           # Extract the tool call information from the Google Gemini tool call hash
@@ -31,18 +57,32 @@ module Langchain
             [tool_call_id, tool_name, method_name, tool_arguments]
           end
 
+          # Build the tools for the Google Gemini LLM
+          #
+          # @param tools [Array<Langchain::Tool::Base>] The tools
+          # @return [Array] The tools in Google Gemini format
           def build_tools(tools)
             tools.map { |tool| tool.class.function_schemas.to_google_gemini_format }.flatten
           end
 
+          # Get the allowed assistant.tool_choice values for Google Gemini
           def allowed_tool_choices
             ["auto", "none"]
           end
 
+          # Get the available tool names for Google Gemini
          def available_tool_names(tools)
             build_tools(tools).map { |tool| tool.dig(:name) }
           end
 
+          def tool_role
+            Messages::GoogleGeminiMessage::TOOL_ROLE
+          end
+
+          def support_system_message?
+            Messages::GoogleGeminiMessage::ROLES.include?("system")
+          end
+
           private
 
           def build_tool_config(choice)
data/lib/langchain/{assistants → assistant}/llm/adapters/mistral_ai.rb RENAMED
@@ -1,9 +1,27 @@
+# frozen_string_literal: true
+
 module Langchain
   class Assistant
     module LLM
       module Adapters
         class MistralAI < Base
-          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
+          # Build the chat parameters for the Mistral AI LLM
+          #
+          # @param messages [Array] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
+            Langchain.logger.warn "WARNING: `parallel_tool_calls:` is not supported by Mistral AI currently"
+
             params = {messages: messages}
             if tools.any?
               params[:tools] = build_tools(tools)
@@ -12,8 +30,16 @@ module Langchain
             params
           end
 
+          # Build a Mistral AI message
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::MistralAIMessage] The Mistral AI message
           def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
-            Langchain::Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
+            Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
           end
 
           # Extract the tool call information from the OpenAI tool call hash
@@ -36,18 +62,29 @@ module Langchain
             [tool_call_id, tool_name, method_name, tool_arguments]
           end
 
+          # Build the tools for the Mistral AI LLM
           def build_tools(tools)
             tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
           end
 
+          # Get the allowed assistant.tool_choice values for Mistral AI
           def allowed_tool_choices
             ["auto", "none"]
           end
 
+          # Get the available tool names for Mistral AI
           def available_tool_names(tools)
             build_tools(tools).map { |tool| tool.dig(:function, :name) }
           end
 
+          def tool_role
+            Messages::MistralAIMessage::TOOL_ROLE
+          end
+
+          def support_system_message?
+            Messages::MistralAIMessage::ROLES.include?("system")
+          end
+
           private
 
           def build_tool_choice(choice)
data/lib/langchain/assistant/llm/adapters/ollama.rb ADDED
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+module Langchain
+  class Assistant
+    module LLM
+      module Adapters
+        class Ollama < Base
+          # Build the chat parameters for the Ollama LLM
+          #
+          # @param messages [Array] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
+            Langchain.logger.warn "WARNING: `parallel_tool_calls:` is not supported by Ollama currently"
+            Langchain.logger.warn "WARNING: `tool_choice:` is not supported by Ollama currently"
+
+            params = {messages: messages}
+            if tools.any?
+              params[:tools] = build_tools(tools)
+            end
+            params
+          end
+
+          # Build an Ollama message
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::OllamaMessage] The Ollama message
+          def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
+            Langchain.logger.warn "WARNING: Image URL is not supported by Ollama currently" if image_url
+
+            Messages::OllamaMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
+          end
+
+          # Extract the tool call information from the OpenAI-format tool call hash
+          #
+          # @param tool_call [Hash] The tool call hash
+          # @return [Array] The tool call information
+          def extract_tool_call_args(tool_call:)
+            tool_call_id = tool_call.dig("id")
+
+            function_name = tool_call.dig("function", "name")
+            tool_name, method_name = function_name.split("__")
+
+            tool_arguments = tool_call.dig("function", "arguments")
+            tool_arguments = if tool_arguments.is_a?(Hash)
+              Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
+            else
+              JSON.parse(tool_arguments, symbolize_names: true)
+            end
+
+            [tool_call_id, tool_name, method_name, tool_arguments]
+          end
+
+          # Get the available tool names for Ollama
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:function, :name) }
+          end
+
+          # Get the allowed assistant.tool_choice values for Ollama
+          def allowed_tool_choices
+            ["auto", "none"]
+          end
+
+          def tool_role
+            Messages::OllamaMessage::TOOL_ROLE
+          end
+
+          def support_system_message?
+            Messages::OllamaMessage::ROLES.include?("system")
+          end
+
+          private
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
+          end
+        end
+      end
+    end
+  end
+end
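
Ollama may return tool arguments either as an already-parsed Hash or as a raw JSON string; `extract_tool_call_args` above normalizes both. A sketch (the tool call hashes and the "calculator__evaluate" name are illustrative):

  adapter = Langchain::Assistant::LLM::Adapters::Ollama.new

  # Arguments already parsed into a Hash:
  adapter.extract_tool_call_args(tool_call: {
    "id" => "call_1",
    "function" => {"name" => "calculator__evaluate", "arguments" => {"input" => "2+2"}}
  })
  # => ["call_1", "calculator", "evaluate", {input: "2+2"}]

  # Arguments as a raw JSON string:
  adapter.extract_tool_call_args(tool_call: {
    "id" => "call_2",
    "function" => {"name" => "calculator__evaluate", "arguments" => "{\"input\":\"2+2\"}"}
  })
  # => ["call_2", "calculator", "evaluate", {input: "2+2"}]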
data/lib/langchain/{assistants → assistant}/llm/adapters/openai.rb RENAMED
@@ -1,19 +1,44 @@
+# frozen_string_literal: true
+
 module Langchain
   class Assistant
     module LLM
       module Adapters
         class OpenAI < Base
-          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
+          # Build the chat parameters for the OpenAI LLM
+          #
+          # @param messages [Array] The messages
+          # @param instructions [String] The system instructions
+          # @param tools [Array] The tools to use
+          # @param tool_choice [String] The tool choice
+          # @param parallel_tool_calls [Boolean] Whether to make parallel tool calls
+          # @return [Hash] The chat parameters
+          def build_chat_params(
+            messages:,
+            instructions:,
+            tools:,
+            tool_choice:,
+            parallel_tool_calls:
+          )
             params = {messages: messages}
             if tools.any?
               params[:tools] = build_tools(tools)
               params[:tool_choice] = build_tool_choice(tool_choice)
+              params[:parallel_tool_calls] = parallel_tool_calls
             end
             params
           end
 
+          # Build an OpenAI message
+          #
+          # @param role [String] The role of the message
+          # @param content [String] The content of the message
+          # @param image_url [String] The image URL
+          # @param tool_calls [Array] The tool calls
+          # @param tool_call_id [String] The tool call ID
+          # @return [Messages::OpenAIMessage] The OpenAI message
           def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
-            Langchain::Messages::OpenAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
+            Messages::OpenAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
           end
 
           # Extract the tool call information from the OpenAI tool call hash
@@ -36,18 +61,29 @@ module Langchain
             [tool_call_id, tool_name, method_name, tool_arguments]
           end
 
+          # Build the tools for the OpenAI LLM
           def build_tools(tools)
             tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
           end
 
+          # Get the allowed assistant.tool_choice values for OpenAI
           def allowed_tool_choices
             ["auto", "none"]
           end
 
+          # Get the available tool names for OpenAI
           def available_tool_names(tools)
             build_tools(tools).map { |tool| tool.dig(:function, :name) }
           end
 
+          def tool_role
+            Messages::OpenAIMessage::TOOL_ROLE
+          end
+
+          def support_system_message?
+            Messages::OpenAIMessage::ROLES.include?("system")
+          end
+
           private
 
           def build_tool_choice(choice)
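
Unlike the other adapters, OpenAI supports `parallel_tool_calls` natively, so the value is forwarded straight through when tools are present. A sketch, assuming `calculator_tool` is any Langchain tool instance:

  adapter = Langchain::Assistant::LLM::Adapters::OpenAI.new

  params = adapter.build_chat_params(
    messages: [{role: "user", content: "What is 2+2?"}],
    instructions: nil,
    tools: [calculator_tool],
    tool_choice: "auto",
    parallel_tool_calls: false
  )
  params[:parallel_tool_calls] # => false, passed verbatim to the OpenAI API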
data/lib/langchain/assistant/messages/anthropic_message.rb ADDED
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+module Langchain
+  class Assistant
+    module Messages
+      class AnthropicMessage < Base
+        ROLES = [
+          "assistant",
+          "user",
+          "tool_result"
+        ].freeze
+
+        TOOL_ROLE = "tool_result"
+
+        def initialize(role:, content: nil, tool_calls: [], tool_call_id: nil)
+          raise ArgumentError, "Role must be one of #{ROLES.join(", ")}" unless ROLES.include?(role)
+          raise ArgumentError, "Tool calls must be an array of hashes" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }
+
+          @role = role
+          # Some Tools return content as a JSON hash, hence `.to_s`
+          @content = content.to_s
+          @tool_calls = tool_calls
+          @tool_call_id = tool_call_id
+        end
+
+        # Convert the message to an Anthropic API-compatible hash
+        #
+        # @return [Hash] The message as an Anthropic API-compatible hash
+        def to_hash
+          {}.tap do |h|
+            h[:role] = tool? ? "user" : role
+
+            h[:content] = if tool?
+              [
+                {
+                  type: "tool_result",
+                  tool_use_id: tool_call_id,
+                  content: content
+                }
+              ]
+            elsif tool_calls.any?
+              tool_calls
+            else
+              content
+            end
+          end
+        end
+
+        # Check if the message is a tool call
+        #
+        # @return [Boolean] true/false whether this message is a tool call
+        def tool?
+          role == "tool_result"
+        end
+
+        # Anthropic does not use a "system" message role
+        def system?
+          false
+        end
+
+        # Check if the message came from the assistant
+        #
+        # @return [Boolean] true/false whether this message was produced by the assistant
+        def assistant?
+          role == "assistant"
+        end
+
+        # Check if the message came from an LLM
+        #
+        # @return [Boolean] true/false whether this message was produced by an LLM
+        def llm?
+          assistant?
+        end
+      end
+    end
+  end
+end
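
A sketch of what `to_hash` produces for a tool-result message; note how the `tool_result` role is mapped back to a `user` turn, as the code above requires (the content and the reused ID are illustrative):

  msg = Langchain::Assistant::Messages::AnthropicMessage.new(
    role: "tool_result",
    content: "72 degrees and sunny",
    tool_call_id: "toolu_01TjusbFApEbwKPRWTRwzadR"
  )

  msg.to_hash
  # => {
  #      role: "user",  # Anthropic expects tool results inside a user turn
  #      content: [{type: "tool_result", tool_use_id: "toolu_01TjusbFApEbwKPRWTRwzadR", content: "72 degrees and sunny"}]
  #    }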