langchainrb 0.15.3 → 0.15.4
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/README.md +84 -52
- data/lib/langchain/assistants/assistant.rb +195 -26
- data/lib/langchain/assistants/messages/mistral_ai_message.rb +74 -0
- data/lib/langchain/llm/azure.rb +3 -0
- data/lib/langchain/llm/google_gemini.rb +0 -1
- data/lib/langchain/llm/mistral_ai.rb +1 -1
- data/lib/langchain/llm/ollama.rb +3 -3
- data/lib/langchain/llm/response/mistral_ai_response.rb +9 -1
- data/lib/langchain/llm/response/ollama_response.rb +1 -1
- data/lib/langchain/tool/database.rb +48 -28
- data/lib/langchain/version.rb +1 -1
- data/lib/langchain.rb +1 -0
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 68d2d64fb264bf47488e83581540b88f7746f0e1b1b318d7dfc9c15356f40c8c
+  data.tar.gz: efbd840632f0f22b202d9257a2020c4bc49cea7528d79efa7e05f963e9e4745f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f57817cc62de3af8f9aa80c62e421255e4172f55d54d91c1adc7d7a03ac9272e866670532c996a19d502c6d3110627d778efdad8c4faad0b07868c8b0aebc81c
+  data.tar.gz: b35d82314edc0d747c87a37bd0e2036755f884e85977639e8b53063ca8ebd7002b9dd4ae9a5dd443f088552e575471e080dc9aa4519e25efaa908e16130b613a
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 ## [Unreleased]

+## [0.15.4] - 2024-08-30
+- Improve the Langchain::Tool::Database tool
+- Allow explicitly setting tool_choice on the Assistant instance
+- Add support for bulk embedding in Ollama
+- `Langchain::Assistant` works with `Langchain::LLM::MistralAI` llm
+- Fix Langchain::LLM::Azure not applying full default_options
+
 ## [0.15.3] - 2024-08-27
 - Fix OpenAI#embed when text-embedding-ada-002 is used
data/README.md
CHANGED
@@ -402,75 +402,107 @@ client.ask(question: "...")
 ```

 ## Assistants
-
-### Available Tools 🛠️
-
-| Name | Description | ENV Requirements | Gem Requirements |
-| ------------ | :------------------------------------------------: | :-----------------------------------------------------------: | :---------------------------------------: |
-| "calculator" | Useful for getting the result of a math expression | | `gem "eqn", "~> 1.6.5"` |
-| "database" | Useful for querying a SQL database | | `gem "sequel", "~> 5.68.0"` |
-| "file_system" | Interacts with the file system | | |
-| "ruby_code_interpreter" | Interprets Ruby expressions | | `gem "safe_ruby", "~> 1.0.4"` |
-| "google_search" | A wrapper around Google Search | `ENV["SERPAPI_API_KEY"]` (https://serpapi.com/manage-api-key) | `gem "google_search_results", "~> 2.0.0"` |
-| "news_retriever" | A wrapper around NewsApi.org | `ENV["NEWS_API_KEY"]` (https://newsapi.org/) | |
-| "tavily" | A wrapper around Tavily AI | `ENV["TAVILY_API_KEY"]` (https://tavily.com/) | |
-| "weather" | Calls Open Weather API to retrieve the current weather | `ENV["OPEN_WEATHER_API_KEY"]` (https://home.openweathermap.org/api_keys) | |
-| "wikipedia" | Calls Wikipedia API to retrieve the summary | | `gem "wikipedia-client", "~> 1.17.0"` |
+`Langchain::Assistant` is a powerful and flexible class that combines Large Language Models (LLMs), tools, and conversation management to create intelligent, interactive assistants. It's designed to handle complex conversations, execute tools, and provide coherent responses based on the context of the interaction.

-###
-
-
-
+### Features
+* Supports multiple LLM providers (OpenAI, Google Gemini, Anthropic, Mistral AI, and open-source models via Ollama)
+* Integrates with various tools to extend functionality
+* Manages conversation threads
+* Handles automatic and manual tool execution
+* Supports different message formats for various LLM providers

-###
-1. Instantiate an LLM of your choice
+### Usage
 ```ruby
 llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])
-```
-2. Instantiate an Assistant
-```ruby
 assistant = Langchain::Assistant.new(
   llm: llm,
-  instructions: "You
-  tools: [
-    Langchain::Tool::Weather.new(api_key: ENV["OPEN_WEATHER_API_KEY"])
-  ]
+  instructions: "You're a helpful AI assistant",
+  tools: [Langchain::Tool::NewsRetriever.new(api_key: ENV["NEWS_API_KEY"])]
 )
-```
-### Using an Assistant
-You can now add your message to an Assistant.
-```ruby
-assistant.add_message content: "What's the weather in New York, New York?"
-```

-
-
-assistant.run
-```
+# Add a user message and run the assistant
+assistant.add_message_and_run(content: "What's the latest news about AI?")

-
-
-assistant.submit_tool_output tool_call_id: "...", output: "It's 70 degrees and sunny in New York City"
-```
+# Access the conversation thread
+messages = assistant.messages

-
-```ruby
-assistant.add_message content: "How about San Diego, CA?"
+# Run the assistant with automatic tool execution
 assistant.run(auto_tool_execution: true)
 ```
-
-
-
+
+### Configuration
+* `llm`: The LLM instance to use (required)
+* `tools`: An array of tool instances (optional)
+* `instructions`: System instructions for the assistant (optional)
+* `tool_choice`: Specifies how tools should be selected. Default: "auto". A specific tool function name can be passed; this forces the Assistant to **always** use that function.
+
+### Key Methods
+* `add_message`: Adds a user message to the messages array
+* `run`: Processes the conversation and generates responses
+* `add_message_and_run`: Combines adding a message and running the assistant
+* `submit_tool_output`: Manually submit output to a tool call
+* `messages`: Returns a list of ongoing messages
+
+### Built-in Tools 🛠️
+* `Langchain::Tool::Calculator`: Useful for evaluating math expressions. Requires `gem "eqn"`.
+* `Langchain::Tool::Database`: Connect to your SQL database. Requires `gem "sequel"`.
+* `Langchain::Tool::FileSystem`: Interact with the file system (read & write).
+* `Langchain::Tool::RubyCodeInterpreter`: Useful for evaluating generated Ruby code. Requires `gem "safe_ruby"` (in need of a better solution).
+* `Langchain::Tool::NewsRetriever`: A wrapper around [NewsApi.org](https://newsapi.org) to fetch news articles.
+* `Langchain::Tool::Tavily`: A wrapper around [Tavily AI](https://tavily.com).
+* `Langchain::Tool::Weather`: Calls [Open Weather API](https://home.openweathermap.org) to retrieve the current weather.
+* `Langchain::Tool::Wikipedia`: Calls Wikipedia API.
+
+### Creating custom Tools
+The Langchain::Assistant can be easily extended with custom tools by creating classes that extend the `Langchain::ToolDefinition` module and implement the required methods.
+```ruby
+class MovieInfoTool
+  extend Langchain::ToolDefinition
+
+  define_function :search_movie, description: "MovieInfoTool: Search for a movie by title" do
+    property :query, type: "string", description: "The movie title to search for", required: true
+  end
+
+  define_function :get_movie_details, description: "MovieInfoTool: Get detailed information about a specific movie" do
+    property :movie_id, type: "integer", description: "The TMDb ID of the movie", required: true
+  end
+
+  def initialize(api_key:)
+    @api_key = api_key
+  end
+
+  def search_movie(query:)
+    ...
+  end
+
+  def get_movie_details(movie_id:)
+    ...
+  end
+end
 ```

-
-You can access the messages in a Thread by calling `assistant.thread.messages`.
+#### Example usage:
 ```ruby
-
+movie_tool = MovieInfoTool.new(api_key: "...")
+
+assistant = Langchain::Assistant.new(
+  llm: llm,
+  instructions: "You're a helpful AI assistant that can provide movie information",
+  tools: [movie_tool]
+)
+
+assistant.add_message_and_run(content: "Can you tell me about the movie 'Inception'?")
+# Check the response in the last message in the conversation
+assistant.messages.last
 ```

-
+### Error Handling
+The assistant includes error handling for invalid inputs, unsupported LLM types, and tool execution failures. It uses a state machine to manage the conversation flow and handle different scenarios gracefully.
+
+### Demos
+1. [Building an AI Assistant that operates a simulated E-commerce Store](https://www.loom.com/share/83aa4fd8dccb492aad4ca95da40ed0b2)
+2. [New Langchain.rb Assistants interface](https://www.loom.com/share/e883a4a49b8746c1b0acf9d58cf6da36)
+3. [Langchain.rb Assistant demo with NewsRetriever and function calling on Gemini](https://youtu.be/-ieyahrpDpM&t=1477s) - [code](https://github.com/palladius/gemini-news-crawler)

 ## Evaluations (Evals)
 The Evaluations module is a collection of tools that can be used to evaluate and track the performance of the outputs produced by LLMs and your RAG (Retrieval Augmented Generation) pipelines.
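To make the new `tool_choice` option concrete, here is a minimal sketch of how it could be used; the function name `news_retriever__get_top_headlines` is a hypothetical example of the `toolname__method` naming convention the adapters split on in `extract_tool_call_args`:

```ruby
llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

# "auto" (the default) lets the LLM decide whether to call a tool.
assistant = Langchain::Assistant.new(
  llm: llm,
  tools: [Langchain::Tool::NewsRetriever.new(api_key: ENV["NEWS_API_KEY"])],
  tool_choice: "auto"
)

# Passing a specific function name forces the Assistant to always call it.
assistant.tool_choice = "news_retriever__get_top_headlines" # hypothetical function name

# Anything outside allowed_tool_choices plus the available tool names raises.
assistant.tool_choice = "nonexistent_tool" # => ArgumentError
```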
data/lib/langchain/assistants/assistant.rb
CHANGED
@@ -15,7 +15,7 @@ module Langchain
     extend Forwardable
     def_delegators :thread, :messages

-    attr_reader :llm, :thread, :instructions, :state
+    attr_reader :llm, :thread, :instructions, :state, :llm_adapter, :tool_choice
     attr_reader :total_prompt_tokens, :total_completion_tokens, :total_tokens
     attr_accessor :tools
@@ -29,7 +29,8 @@ module Langchain
       llm:,
       thread: nil,
       tools: [],
-      instructions: nil
+      instructions: nil,
+      tool_choice: "auto"
     )
       unless tools.is_a?(Array) && tools.all? { |tool| tool.class.singleton_class.included_modules.include?(Langchain::ToolDefinition) }
         raise ArgumentError, "Tools must be an array of objects extending Langchain::ToolDefinition"
@@ -39,6 +40,7 @@ module Langchain
       @llm_adapter = LLM::Adapter.build(llm)
       @thread = thread || Langchain::Thread.new
       @tools = tools
+      self.tool_choice = tool_choice
       @instructions = instructions
       @state = :ready

@@ -150,8 +152,21 @@ module Langchain
       thread.messages.unshift(message)
     end

+    def tool_choice=(new_tool_choice)
+      validate_tool_choice!(new_tool_choice)
+      @tool_choice = new_tool_choice
+    end
+
     private

+    # TODO: If tool_choice = "tool_function_name" and then tool is removed from the assistant, should we set tool_choice back to "auto"?
+    def validate_tool_choice!(tool_choice)
+      allowed_tool_choices = llm_adapter.allowed_tool_choices.concat(available_tool_names)
+      unless allowed_tool_choices.include?(tool_choice)
+        raise ArgumentError, "Tool choice must be one of: #{allowed_tool_choices.join(", ")}"
+      end
+    end
+
     # Check if the run is finished
     #
     # @param auto_tool_execution [Boolean] Whether or not to automatically run tools
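Because the constructor assigns through `self.tool_choice =` rather than setting the instance variable directly, the same validation runs at initialization and on later reassignment. A sketch of the resulting behavior, assuming an OpenAI LLM and no tools configured:

```ruby
# With no tools, allowed_tool_choices is just the adapter's base set.
assistant = Langchain::Assistant.new(llm: llm, tools: [], tool_choice: "banana")
# => ArgumentError: Tool choice must be one of: auto, none
```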
@@ -257,19 +272,21 @@ module Langchain
     # @return [String] The tool role
     def determine_tool_role
       case llm
+      when Langchain::LLM::Anthropic
+        Langchain::Messages::AnthropicMessage::TOOL_ROLE
+      when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
+        Langchain::Messages::GoogleGeminiMessage::TOOL_ROLE
+      when Langchain::LLM::MistralAI
+        Langchain::Messages::MistralAIMessage::TOOL_ROLE
       when Langchain::LLM::Ollama
         Langchain::Messages::OllamaMessage::TOOL_ROLE
       when Langchain::LLM::OpenAI
         Langchain::Messages::OpenAIMessage::TOOL_ROLE
-      when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
-        Langchain::Messages::GoogleGeminiMessage::TOOL_ROLE
-      when Langchain::LLM::Anthropic
-        Langchain::Messages::AnthropicMessage::TOOL_ROLE
       end
     end

     def initialize_instructions
-      if llm.is_a?(Langchain::LLM::OpenAI)
+      if llm.is_a?(Langchain::LLM::OpenAI) || llm.is_a?(Langchain::LLM::MistralAI)
         add_message(role: "system", content: instructions) if instructions
       end
     end
@@ -281,9 +298,10 @@ module Langchain
       Langchain.logger.info("Sending a call to #{llm.class}", for: self.class)

       params = @llm_adapter.build_chat_params(
-        tools: @tools,
         instructions: @instructions,
-        messages: thread.array_of_message_hashes
+        messages: thread.array_of_message_hashes,
+        tools: @tools,
+        tool_choice: tool_choice
       )
       @llm.chat(**params)
     end
@@ -298,7 +316,7 @@ module Langchain

       tool_instance = tools.find do |t|
         t.class.tool_name == tool_name
-      end or raise ArgumentError, "Tool not found in assistant.tools"
+      end or raise ArgumentError, "Tool: #{tool_name} not found in assistant.tools"

       output = tool_instance.send(method_name, **tool_arguments)

@@ -329,20 +347,26 @@ module Langchain
       @total_tokens += total_tokens_from_operation if total_tokens_from_operation
     end

+    def available_tool_names
+      llm_adapter.available_tool_names(tools)
+    end
+
     # TODO: Fix the message truncation when context window is exceeded

     module LLM
       class Adapter
         def self.build(llm)
           case llm
+          when Langchain::LLM::Anthropic
+            Adapters::Anthropic.new
+          when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
+            Adapters::GoogleGemini.new
+          when Langchain::LLM::MistralAI
+            Adapters::MistralAI.new
           when Langchain::LLM::Ollama
             Adapters::Ollama.new
           when Langchain::LLM::OpenAI
             Adapters::OpenAI.new
-          when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
-            Adapters::GoogleGemini.new
-          when Langchain::LLM::Anthropic
-            Adapters::Anthropic.new
           else
             raise ArgumentError, "Unsupported LLM type: #{llm.class}"
           end
@@ -351,7 +375,7 @@ module Langchain

       module Adapters
         class Base
-          def build_chat_params(tools:, instructions:, messages:)
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             raise NotImplementedError, "Subclasses must implement build_chat_params"
           end

@@ -365,10 +389,10 @@ module Langchain
         end

         class Ollama < Base
-          def build_chat_params(tools:, instructions:, messages:)
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             params = {messages: messages}
             if tools.any?
-              params[:tools] = tools
+              params[:tools] = build_tools(tools)
             end
             params
           end
@@ -396,14 +420,28 @@ module Langchain

             [tool_call_id, tool_name, method_name, tool_arguments]
           end
+
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:function, :name) }
+          end
+
+          def allowed_tool_choices
+            ["auto", "none"]
+          end
+
+          private
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
+          end
         end

         class OpenAI < Base
-          def build_chat_params(tools:, instructions:, messages:)
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             params = {messages: messages}
             if tools.any?
-              params[:tools] = tools
-              params[:tool_choice] =
+              params[:tools] = build_tools(tools)
+              params[:tool_choice] = build_tool_choice(tool_choice)
             end
             params
           end
@@ -431,15 +469,96 @@ module Langchain

             [tool_call_id, tool_name, method_name, tool_arguments]
           end
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
+          end
+
+          def allowed_tool_choices
+            ["auto", "none"]
+          end
+
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:function, :name) }
+          end
+
+          private
+
+          def build_tool_choice(choice)
+            case choice
+            when "auto"
+              choice
+            else
+              {"type" => "function", "function" => {"name" => choice}}
+            end
+          end
+        end
+
+        class MistralAI < Base
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
+            params = {messages: messages}
+            if tools.any?
+              params[:tools] = build_tools(tools)
+              params[:tool_choice] = build_tool_choice(tool_choice)
+            end
+            params
+          end
+
+          def build_message(role:, content: nil, tool_calls: [], tool_call_id: nil)
+            Langchain::Messages::MistralAIMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
+          end
+
+          # Extract the tool call information from the OpenAI tool call hash
+          #
+          # @param tool_call [Hash] The tool call hash
+          # @return [Array] The tool call information
+          def extract_tool_call_args(tool_call:)
+            tool_call_id = tool_call.dig("id")
+
+            function_name = tool_call.dig("function", "name")
+            tool_name, method_name = function_name.split("__")
+
+            tool_arguments = tool_call.dig("function", "arguments")
+            tool_arguments = if tool_arguments.is_a?(Hash)
+              Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
+            else
+              JSON.parse(tool_arguments, symbolize_names: true)
+            end
+
+            [tool_call_id, tool_name, method_name, tool_arguments]
+          end
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
+          end
+
+          def allowed_tool_choices
+            ["auto", "none"]
+          end
+
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:function, :name) }
+          end
+
+          private
+
+          def build_tool_choice(choice)
+            case choice
+            when "auto"
+              choice
+            else
+              {"type" => "function", "function" => {"name" => choice}}
+            end
+          end
         end

         class GoogleGemini < Base
-          def build_chat_params(tools:, instructions:, messages:)
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             params = {messages: messages}
             if tools.any?
-              params[:tools] = tools
+              params[:tools] = build_tools(tools)
               params[:system] = instructions if instructions
-              params[:tool_choice] =
+              params[:tool_choice] = build_tool_config(tool_choice)
             end
             params
           end
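For reference, a sketch of the values the OpenAI-style `build_tool_choice` above produces; Mistral uses the identical shape, and the function name is hypothetical:

```ruby
# build_tool_choice("auto")
# => "auto"

# build_tool_choice("news_retriever__get_top_headlines")
# => {"type" => "function", "function" => {"name" => "news_retriever__get_top_headlines"}}
```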
@@ -459,14 +578,39 @@ module Langchain
             tool_arguments = tool_call.dig("functionCall", "args").transform_keys(&:to_sym)
             [tool_call_id, tool_name, method_name, tool_arguments]
           end
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_google_gemini_format }.flatten
+          end
+
+          def allowed_tool_choices
+            ["auto", "none"]
+          end
+
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:name) }
+          end
+
+          private
+
+          def build_tool_config(choice)
+            case choice
+            when "auto"
+              {function_calling_config: {mode: "auto"}}
+            when "none"
+              {function_calling_config: {mode: "none"}}
+            else
+              {function_calling_config: {mode: "any", allowed_function_names: [choice]}}
+            end
+          end
         end

         class Anthropic < Base
-          def build_chat_params(tools:, instructions:, messages:)
+          def build_chat_params(tools:, instructions:, messages:, tool_choice:)
             params = {messages: messages}
             if tools.any?
-              params[:tools] = tools
-              params[:tool_choice] =
+              params[:tools] = build_tools(tools)
+              params[:tool_choice] = build_tool_choice(tool_choice)
             end
             params[:system] = instructions if instructions
             params
@@ -487,6 +631,31 @@ module Langchain
             tool_arguments = tool_call.dig("input").transform_keys(&:to_sym)
             [tool_call_id, tool_name, method_name, tool_arguments]
           end
+
+          def build_tools(tools)
+            tools.map { |tool| tool.class.function_schemas.to_anthropic_format }.flatten
+          end
+
+          def allowed_tool_choices
+            ["auto", "any"]
+          end
+
+          def available_tool_names(tools)
+            build_tools(tools).map { |tool| tool.dig(:name) }
+          end
+
+          private
+
+          def build_tool_choice(choice)
+            case choice
+            when "auto"
+              {type: "auto"}
+            when "any"
+              {type: "any"}
+            else
+              {type: "tool", name: choice}
+            end
+          end
         end
       end
     end
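For comparison, a sketch of the provider-native shapes the Gemini and Anthropic helpers above produce for the same `tool_choice` values (`my_fn` is a hypothetical function name):

```ruby
# Google Gemini (build_tool_config)
#   "auto"  => {function_calling_config: {mode: "auto"}}
#   "none"  => {function_calling_config: {mode: "none"}}
#   "my_fn" => {function_calling_config: {mode: "any", allowed_function_names: ["my_fn"]}}

# Anthropic (build_tool_choice)
#   "auto"  => {type: "auto"}
#   "any"   => {type: "any"}
#   "my_fn" => {type: "tool", name: "my_fn"}
```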
data/lib/langchain/assistants/messages/mistral_ai_message.rb
ADDED
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Langchain
+  module Messages
+    class MistralAIMessage < Base
+      # MistralAI uses the following roles:
+      ROLES = [
+        "system",
+        "assistant",
+        "user",
+        "tool"
+      ].freeze
+
+      TOOL_ROLE = "tool"
+
+      # Initialize a new MistralAI message
+      #
+      # @param role [String] The role of the message
+      # @param content [String] The content of the message
+      # @param tool_calls [Array<Hash>] The tool calls made in the message
+      # @param tool_call_id [String] The ID of the tool call
+      def initialize(role:, content: nil, tool_calls: [], tool_call_id: nil) # TODO: Implement image_file: reference (https://platform.openai.com/docs/api-reference/messages/object#messages/object-content)
+        raise ArgumentError, "Role must be one of #{ROLES.join(", ")}" unless ROLES.include?(role)
+        raise ArgumentError, "Tool calls must be an array of hashes" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }
+
+        @role = role
+        # Some Tools return content as JSON, hence `.to_s`
+        @content = content.to_s
+        @tool_calls = tool_calls
+        @tool_call_id = tool_call_id
+      end
+
+      # Check if the message came from an LLM
+      #
+      # @return [Boolean] true/false whether this message was produced by an LLM
+      def llm?
+        assistant?
+      end
+
+      # Convert the message to a MistralAI API-compatible hash
+      #
+      # @return [Hash] The message as a MistralAI API-compatible hash
+      def to_hash
+        {}.tap do |h|
+          h[:role] = role
+          h[:content] = content if content # Content is nil for tool calls
+          h[:tool_calls] = tool_calls if tool_calls.any?
+          h[:tool_call_id] = tool_call_id if tool_call_id
+        end
+      end
+
+      # Check if the message came from an LLM
+      #
+      # @return [Boolean] true/false whether this message was produced by an LLM
+      def assistant?
+        role == "assistant"
+      end
+
+      # Check if the message contains system instructions
+      #
+      # @return [Boolean] true/false whether this message contains system instructions
+      def system?
+        role == "system"
+      end
+
+      # Check if the message is a tool call
+      #
+      # @return [Boolean] true/false whether this message is a tool call
+      def tool?
+        role == "tool"
+      end
+    end
+  end
+end
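A quick sketch of how the new message class serializes, following the `to_hash` implementation above (values are illustrative):

```ruby
message = Langchain::Messages::MistralAIMessage.new(role: "user", content: "Hello!")
message.to_hash
# => {role: "user", content: "Hello!"}

tool_message = Langchain::Messages::MistralAIMessage.new(
  role: "tool",
  content: "It's sunny",
  tool_call_id: "abc123" # hypothetical ID
)
tool_message.to_hash
# => {role: "tool", content: "It's sunny", tool_call_id: "abc123"}
```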
data/lib/langchain/llm/azure.rb
CHANGED
@@ -33,8 +33,11 @@ module Langchain::LLM
     )
     @defaults = DEFAULTS.merge(default_options)
     chat_parameters.update(
+      model: {default: @defaults[:chat_completion_model_name]},
       logprobs: {},
       top_logprobs: {},
+      n: {default: @defaults[:n]},
+      temperature: {default: @defaults[:temperature]},
       user: {}
     )
     chat_parameters.ignore(:top_k)
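A sketch of what the Azure fix enables, with hypothetical deployment and option values (endpoint configuration omitted): `model`, `n`, and `temperature` from `default_options` are now registered as chat-parameter defaults instead of being dropped.

```ruby
llm = Langchain::LLM::Azure.new(
  api_key: ENV["AZURE_OPENAI_API_KEY"],
  default_options: {
    chat_completion_model_name: "gpt-4o-deployment", # hypothetical deployment name
    temperature: 0.2,
    n: 1
  }
)

# The model, temperature, and n defaults above now flow into every call.
llm.chat(messages: [{role: "user", content: "Hello!"}])
```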
data/lib/langchain/llm/google_gemini.rb
CHANGED
@@ -39,7 +39,6 @@ module Langchain::LLM
   def chat(params = {})
     params[:system] = {parts: [{text: params[:system]}]} if params[:system]
     params[:tools] = {function_declarations: params[:tools]} if params[:tools]
-    params[:tool_choice] = {function_calling_config: {mode: params[:tool_choice].upcase}} if params[:tool_choice]

     raise ArgumentError.new("messages argument is required") if Array(params[:messages]).empty?

data/lib/langchain/llm/mistral_ai.rb
CHANGED
@@ -8,7 +8,7 @@ module Langchain::LLM
 # llm = Langchain::LLM::MistralAI.new(api_key: ENV["MISTRAL_AI_API_KEY"])
 class MistralAI < Base
   DEFAULTS = {
-    chat_completion_model_name: "mistral-
+    chat_completion_model_name: "mistral-large-latest",
     embeddings_model_name: "mistral-embed"
   }.freeze

data/lib/langchain/llm/ollama.rb
CHANGED
@@ -218,8 +218,8 @@ module Langchain::LLM
     top_p: nil
   )
   parameters = {
-
-
+    model: model,
+    input: Array(text)
   }.compact

   llm_parameters = {
@@ -243,7 +243,7 @@ module Langchain::LLM

   parameters[:options] = llm_parameters.compact

-  response = client.post("api/
+  response = client.post("api/embed") do |req|
     req.body = parameters
   end

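A sketch of the resulting bulk-embedding behavior (local Ollama URL assumed; `Array(text)` wraps a single string and passes an array through unchanged):

```ruby
llm = Langchain::LLM::Ollama.new(url: "http://localhost:11434")

# A single text still works as before:
llm.embed(text: "Hello world")

# Multiple texts can now be embedded in one api/embed call:
llm.embed(text: ["Hello world", "Goodbye world"])
```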
data/lib/langchain/llm/response/mistral_ai_response.rb
CHANGED
@@ -7,7 +7,15 @@ module Langchain::LLM
   end

   def chat_completion
-
+    chat_completions.dig(0, "message", "content")
+  end
+
+  def chat_completions
+    raw_response.dig("choices")
+  end
+
+  def tool_calls
+    chat_completions.dig(0, "message", "tool_calls") || []
   end

   def role
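Given the accessors above, a Mistral chat payload would be read like this; the response hash is a truncated, illustrative sketch of Mistral's OpenAI-style format:

```ruby
raw_response = {
  "choices" => [
    {
      "message" => {
        "role" => "assistant",
        "content" => "Hello!",
        "tool_calls" => []
      }
    }
  ]
}

response = Langchain::LLM::MistralAIResponse.new(raw_response)
response.chat_completion # => "Hello!"
response.tool_calls      # => []
```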
data/lib/langchain/tool/database.rb
CHANGED
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
 module Langchain::Tool
   #
-  # Connects to a database, executes SQL queries, and outputs DB schema for Agents to use
+  # Connects to a SQL database, executes SQL queries, and outputs DB schema for Agents to use
   #
   # Gem requirements:
   #     gem "sequel", "~> 5.68.0"
@@ -15,7 +17,9 @@ module Langchain::Tool
     define_function :list_tables, description: "Database Tool: Returns a list of tables in the database"

     define_function :describe_tables, description: "Database Tool: Returns the schema for a list of tables" do
-      property :tables, type: "
+      property :tables, type: "array", description: "The tables to describe", required: true do
+        item type: "string"
+      end
     end

     define_function :dump_schema, description: "Database Tool: Returns the database schema"
@@ -38,25 +42,32 @@ module Langchain::Tool
       raise StandardError, "connection_string parameter cannot be blank" if connection_string.empty?

       @db = Sequel.connect(connection_string)
+      # TODO: This is a bug, these 2 parameters are completely ignored.
       @requested_tables = tables
       @excluded_tables = exclude_tables
     end

     # Database Tool: Returns a list of tables in the database
+    #
+    # @return [Array<Symbol>] List of tables in the database
     def list_tables
       db.tables
     end

     # Database Tool: Returns the schema for a list of tables
     #
-    # @param tables [String] The tables to describe.
-    # @return [String]
-    def describe_tables(tables:)
-
-
-
-
+    # @param tables [Array<String>] The tables to describe.
+    # @return [String] The schema for the tables
+    def describe_tables(tables: [])
+      return "No tables specified" if tables.empty?
+
+      Langchain.logger.info("Describing tables: #{tables}", for: self.class)
+
+      tables
+        .map do |table|
+          describe_table(table)
+        end
+        .join("\n")
     end

     # Database Tool: Returns the database schema
@@ -64,18 +75,39 @@ module Langchain::Tool
     # @return [String] Database schema
     def dump_schema
       Langchain.logger.info("Dumping schema tables and keys", for: self.class)
-
-      db.tables.
-        describe_table(table
+
+      schemas = db.tables.map do |table|
+        describe_table(table)
       end
-
+      schemas.join("\n")
     end

-
+    # Database Tool: Executes a SQL query and returns the results
+    #
+    # @param input [String] SQL query to be executed
+    # @return [Array] Results from the SQL query
+    def execute(input:)
+      Langchain.logger.info("Executing \"#{input}\"", for: self.class)
+
+      db[input].to_a
+    rescue Sequel::DatabaseError => e
+      Langchain.logger.error(e.message, for: self.class)
+      e.message # Return error to LLM
+    end
+
+    private
+
+    # Describes a table and its schema
+    #
+    # @param table [String] The table to describe
+    # @return [String] The schema for the table
+    def describe_table(table)
+      # TODO: There's probably a cleaner way to do all of this below
+
       primary_key_columns = []
       primary_key_column_count = db.schema(table).count { |column| column[1][:primary_key] == true }

-      schema
+      schema = "CREATE TABLE #{table}(\n"
       db.schema(table).each do |column|
         schema << "#{column[0]} #{column[1][:type]}"
         if column[1][:primary_key] == true
@@ -95,17 +127,5 @@ module Langchain::Tool
       end
       schema << ");\n"
     end
-
-    # Database Tool: Executes a SQL query and returns the results
-    #
-    # @param input [String] SQL query to be executed
-    # @return [Array] Results from the SQL query
-    def execute(input:)
-      Langchain.logger.info("Executing \"#{input}\"", for: self.class)
-
-      db[input].to_a
-    rescue Sequel::DatabaseError => e
-      Langchain.logger.error(e.message, for: self.class)
-    end
   end
 end
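Putting the reworked Database tool together, a usage sketch (connection string, table names, and results are hypothetical):

```ruby
db_tool = Langchain::Tool::Database.new(connection_string: "postgres://localhost:5432/my_db")

db_tool.list_tables                        # => [:users, :orders]
db_tool.describe_tables(tables: ["users"]) # => "CREATE TABLE users(...);"
db_tool.describe_tables(tables: [])        # => "No tables specified"

db_tool.execute(input: "SELECT COUNT(*) FROM users")
# On a bad query, the Sequel error message is now returned to the LLM instead of nil.
```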
data/lib/langchain/version.rb
CHANGED
data/lib/langchain.rb
CHANGED
@@ -18,6 +18,7 @@ loader.inflector.inflect(
   "llm" => "LLM",
   "mistral_ai" => "MistralAI",
   "mistral_ai_response" => "MistralAIResponse",
+  "mistral_ai_message" => "MistralAIMessage",
   "openai" => "OpenAI",
   "openai_validator" => "OpenAIValidator",
   "openai_response" => "OpenAIResponse",
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: langchainrb
 version: !ruby/object:Gem::Version
-  version: 0.15.3
+  version: 0.15.4
 platform: ruby
 authors:
 - Andrei Bondarev
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-08-
+date: 2024-08-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: baran
@@ -669,6 +669,7 @@ files:
 - lib/langchain/assistants/messages/anthropic_message.rb
 - lib/langchain/assistants/messages/base.rb
 - lib/langchain/assistants/messages/google_gemini_message.rb
+- lib/langchain/assistants/messages/mistral_ai_message.rb
 - lib/langchain/assistants/messages/ollama_message.rb
 - lib/langchain/assistants/messages/openai_message.rb
 - lib/langchain/assistants/thread.rb