langchainrb 0.17.0 → 0.17.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/lib/langchain/assistants/assistant.rb +2 -315
- data/lib/langchain/assistants/llm/adapter.rb +27 -0
- data/lib/langchain/assistants/llm/adapters/_base.rb +21 -0
- data/lib/langchain/assistants/llm/adapters/anthropic.rb +62 -0
- data/lib/langchain/assistants/llm/adapters/google_gemini.rb +62 -0
- data/lib/langchain/assistants/llm/adapters/mistral_ai.rb +65 -0
- data/lib/langchain/assistants/llm/adapters/ollama.rb +57 -0
- data/lib/langchain/assistants/llm/adapters/openai.rb +65 -0
- data/lib/langchain/llm/anthropic.rb +2 -1
- data/lib/langchain/tool/database.rb +1 -1
- data/lib/langchain/version.rb +1 -1
- metadata +9 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: f7061ef2090d35626239ca575b60edb291dbbadab7de85a5a2796792e1691437
|
4
|
+
data.tar.gz: 30cb1f14b602a22e7df8f2dba42660383d44482cbe83fb35dc9539afa836739c
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: dd08fb29bd0ff9237cc27980c3bac607baeb9d54a93f297b1e81fb863b7cbb9720db4adacb3dae92bcbe71d2eb59b38d4de0ee321face467a0b82bde627d2929
|
7
|
+
data.tar.gz: 4a3661afd2d9d75a64e02f6f173cd0bf0e016207c444ca4506bab907f00dc906f5bbf82c96aad9521280265f214b0f3e82dd5e4ee54dc40f3afb415a6f50b365
|
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,9 @@
|
|
1
1
|
## [Unreleased]
|
2
2
|
|
3
|
+
## [0.17.1] - 2024-10-07
|
4
|
+
- Move Langchain::Assistant::LLM::Adapter-related classes to separate files
|
5
|
+
- Fix Langchain::Tool::Database#describe_table method
|
6
|
+
|
3
7
|
## [0.17.0] - 2024-10-02
|
4
8
|
- [BREAKING] Langchain::Vectorsearch::Milvus was rewritten to work with newer milvus 0.10.0 gem
|
5
9
|
- [BREAKING] Removing Langchain::LLM::GooglePalm
|
@@ -1,5 +1,7 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
+
require_relative "llm/adapter"
|
4
|
+
|
3
5
|
module Langchain
|
4
6
|
# Assistants are Agent-like objects that leverage helpful instructions, LLMs, tools and knowledge to respond to user queries.
|
5
7
|
# Assistants can be configured with an LLM of your choice, any vector search database and easily extended with additional tools.
|
@@ -412,320 +414,5 @@ module Langchain
|
|
412
414
|
def available_tool_names
|
413
415
|
llm_adapter.available_tool_names(tools)
|
414
416
|
end
|
415
|
-
|
416
|
-
# TODO: Fix the message truncation when context window is exceeded
|
417
|
-
|
418
|
-
module LLM
|
419
|
-
class Adapter
|
420
|
-
def self.build(llm)
|
421
|
-
case llm
|
422
|
-
when Langchain::LLM::Anthropic
|
423
|
-
Adapters::Anthropic.new
|
424
|
-
when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
|
425
|
-
Adapters::GoogleGemini.new
|
426
|
-
when Langchain::LLM::MistralAI
|
427
|
-
Adapters::MistralAI.new
|
428
|
-
when Langchain::LLM::Ollama
|
429
|
-
Adapters::Ollama.new
|
430
|
-
when Langchain::LLM::OpenAI
|
431
|
-
Adapters::OpenAI.new
|
432
|
-
else
|
433
|
-
raise ArgumentError, "Unsupported LLM type: #{llm.class}"
|
434
|
-
end
|
435
|
-
end
|
436
|
-
end
|
437
|
-
|
438
|
-
module Adapters
|
439
|
-
class Base
|
440
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
441
|
-
raise NotImplementedError, "Subclasses must implement build_chat_params"
|
442
|
-
end
|
443
|
-
|
444
|
-
def extract_tool_call_args(tool_call:)
|
445
|
-
raise NotImplementedError, "Subclasses must implement extract_tool_call_args"
|
446
|
-
end
|
447
|
-
|
448
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
449
|
-
raise NotImplementedError, "Subclasses must implement build_message"
|
450
|
-
end
|
451
|
-
end
|
452
|
-
|
453
|
-
class Ollama < Base
|
454
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
455
|
-
params = {messages: messages}
|
456
|
-
if tools.any?
|
457
|
-
params[:tools] = build_tools(tools)
|
458
|
-
end
|
459
|
-
params
|
460
|
-
end
|
461
|
-
|
462
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
463
|
-
warn "Image URL is not supported by Ollama currently" if image_url
|
464
|
-
|
465
|
-
Langchain::Messages::OllamaMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
466
|
-
end
|
467
|
-
|
468
|
-
# Extract the tool call information from the OpenAI tool call hash
|
469
|
-
#
|
470
|
-
# @param tool_call [Hash] The tool call hash
|
471
|
-
# @return [Array] The tool call information
|
472
|
-
def extract_tool_call_args(tool_call:)
|
473
|
-
tool_call_id = tool_call.dig("id")
|
474
|
-
|
475
|
-
function_name = tool_call.dig("function", "name")
|
476
|
-
tool_name, method_name = function_name.split("__")
|
477
|
-
|
478
|
-
tool_arguments = tool_call.dig("function", "arguments")
|
479
|
-
tool_arguments = if tool_arguments.is_a?(Hash)
|
480
|
-
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
481
|
-
else
|
482
|
-
JSON.parse(tool_arguments, symbolize_names: true)
|
483
|
-
end
|
484
|
-
|
485
|
-
[tool_call_id, tool_name, method_name, tool_arguments]
|
486
|
-
end
|
487
|
-
|
488
|
-
def available_tool_names(tools)
|
489
|
-
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
490
|
-
end
|
491
|
-
|
492
|
-
def allowed_tool_choices
|
493
|
-
["auto", "none"]
|
494
|
-
end
|
495
|
-
|
496
|
-
private
|
497
|
-
|
498
|
-
def build_tools(tools)
|
499
|
-
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
500
|
-
end
|
501
|
-
end
|
502
|
-
|
503
|
-
class OpenAI < Base
|
504
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
505
|
-
params = {messages: messages}
|
506
|
-
if tools.any?
|
507
|
-
params[:tools] = build_tools(tools)
|
508
|
-
params[:tool_choice] = build_tool_choice(tool_choice)
|
509
|
-
end
|
510
|
-
params
|
511
|
-
end
|
512
|
-
|
513
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
514
|
-
Langchain::Messages::OpenAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
515
|
-
end
|
516
|
-
|
517
|
-
# Extract the tool call information from the OpenAI tool call hash
|
518
|
-
#
|
519
|
-
# @param tool_call [Hash] The tool call hash
|
520
|
-
# @return [Array] The tool call information
|
521
|
-
def extract_tool_call_args(tool_call:)
|
522
|
-
tool_call_id = tool_call.dig("id")
|
523
|
-
|
524
|
-
function_name = tool_call.dig("function", "name")
|
525
|
-
tool_name, method_name = function_name.split("__")
|
526
|
-
|
527
|
-
tool_arguments = tool_call.dig("function", "arguments")
|
528
|
-
tool_arguments = if tool_arguments.is_a?(Hash)
|
529
|
-
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
530
|
-
else
|
531
|
-
JSON.parse(tool_arguments, symbolize_names: true)
|
532
|
-
end
|
533
|
-
|
534
|
-
[tool_call_id, tool_name, method_name, tool_arguments]
|
535
|
-
end
|
536
|
-
|
537
|
-
def build_tools(tools)
|
538
|
-
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
539
|
-
end
|
540
|
-
|
541
|
-
def allowed_tool_choices
|
542
|
-
["auto", "none"]
|
543
|
-
end
|
544
|
-
|
545
|
-
def available_tool_names(tools)
|
546
|
-
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
547
|
-
end
|
548
|
-
|
549
|
-
private
|
550
|
-
|
551
|
-
def build_tool_choice(choice)
|
552
|
-
case choice
|
553
|
-
when "auto"
|
554
|
-
choice
|
555
|
-
else
|
556
|
-
{"type" => "function", "function" => {"name" => choice}}
|
557
|
-
end
|
558
|
-
end
|
559
|
-
end
|
560
|
-
|
561
|
-
class MistralAI < Base
|
562
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
563
|
-
params = {messages: messages}
|
564
|
-
if tools.any?
|
565
|
-
params[:tools] = build_tools(tools)
|
566
|
-
params[:tool_choice] = build_tool_choice(tool_choice)
|
567
|
-
end
|
568
|
-
params
|
569
|
-
end
|
570
|
-
|
571
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
572
|
-
Langchain::Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
573
|
-
end
|
574
|
-
|
575
|
-
# Extract the tool call information from the OpenAI tool call hash
|
576
|
-
#
|
577
|
-
# @param tool_call [Hash] The tool call hash
|
578
|
-
# @return [Array] The tool call information
|
579
|
-
def extract_tool_call_args(tool_call:)
|
580
|
-
tool_call_id = tool_call.dig("id")
|
581
|
-
|
582
|
-
function_name = tool_call.dig("function", "name")
|
583
|
-
tool_name, method_name = function_name.split("__")
|
584
|
-
|
585
|
-
tool_arguments = tool_call.dig("function", "arguments")
|
586
|
-
tool_arguments = if tool_arguments.is_a?(Hash)
|
587
|
-
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
588
|
-
else
|
589
|
-
JSON.parse(tool_arguments, symbolize_names: true)
|
590
|
-
end
|
591
|
-
|
592
|
-
[tool_call_id, tool_name, method_name, tool_arguments]
|
593
|
-
end
|
594
|
-
|
595
|
-
def build_tools(tools)
|
596
|
-
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
597
|
-
end
|
598
|
-
|
599
|
-
def allowed_tool_choices
|
600
|
-
["auto", "none"]
|
601
|
-
end
|
602
|
-
|
603
|
-
def available_tool_names(tools)
|
604
|
-
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
605
|
-
end
|
606
|
-
|
607
|
-
private
|
608
|
-
|
609
|
-
def build_tool_choice(choice)
|
610
|
-
case choice
|
611
|
-
when "auto"
|
612
|
-
choice
|
613
|
-
else
|
614
|
-
{"type" => "function", "function" => {"name" => choice}}
|
615
|
-
end
|
616
|
-
end
|
617
|
-
end
|
618
|
-
|
619
|
-
class GoogleGemini < Base
|
620
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
621
|
-
params = {messages: messages}
|
622
|
-
if tools.any?
|
623
|
-
params[:tools] = build_tools(tools)
|
624
|
-
params[:system] = instructions if instructions
|
625
|
-
params[:tool_choice] = build_tool_config(tool_choice)
|
626
|
-
end
|
627
|
-
params
|
628
|
-
end
|
629
|
-
|
630
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
631
|
-
warn "Image URL is not supported by Google Gemini" if image_url
|
632
|
-
|
633
|
-
Langchain::Messages::GoogleGeminiMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
634
|
-
end
|
635
|
-
|
636
|
-
# Extract the tool call information from the Google Gemini tool call hash
|
637
|
-
#
|
638
|
-
# @param tool_call [Hash] The tool call hash, format: {"functionCall"=>{"name"=>"weather__execute", "args"=>{"input"=>"NYC"}}}
|
639
|
-
# @return [Array] The tool call information
|
640
|
-
def extract_tool_call_args(tool_call:)
|
641
|
-
tool_call_id = tool_call.dig("functionCall", "name")
|
642
|
-
function_name = tool_call.dig("functionCall", "name")
|
643
|
-
tool_name, method_name = function_name.split("__")
|
644
|
-
tool_arguments = tool_call.dig("functionCall", "args").transform_keys(&:to_sym)
|
645
|
-
[tool_call_id, tool_name, method_name, tool_arguments]
|
646
|
-
end
|
647
|
-
|
648
|
-
def build_tools(tools)
|
649
|
-
tools.map { |tool| tool.class.function_schemas.to_google_gemini_format }.flatten
|
650
|
-
end
|
651
|
-
|
652
|
-
def allowed_tool_choices
|
653
|
-
["auto", "none"]
|
654
|
-
end
|
655
|
-
|
656
|
-
def available_tool_names(tools)
|
657
|
-
build_tools(tools).map { |tool| tool.dig(:name) }
|
658
|
-
end
|
659
|
-
|
660
|
-
private
|
661
|
-
|
662
|
-
def build_tool_config(choice)
|
663
|
-
case choice
|
664
|
-
when "auto"
|
665
|
-
{function_calling_config: {mode: "auto"}}
|
666
|
-
when "none"
|
667
|
-
{function_calling_config: {mode: "none"}}
|
668
|
-
else
|
669
|
-
{function_calling_config: {mode: "any", allowed_function_names: [choice]}}
|
670
|
-
end
|
671
|
-
end
|
672
|
-
end
|
673
|
-
|
674
|
-
class Anthropic < Base
|
675
|
-
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
676
|
-
params = {messages: messages}
|
677
|
-
if tools.any?
|
678
|
-
params[:tools] = build_tools(tools)
|
679
|
-
params[:tool_choice] = build_tool_choice(tool_choice)
|
680
|
-
end
|
681
|
-
params[:system] = instructions if instructions
|
682
|
-
params
|
683
|
-
end
|
684
|
-
|
685
|
-
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
686
|
-
warn "Image URL is not supported by Anthropic currently" if image_url
|
687
|
-
|
688
|
-
Langchain::Messages::AnthropicMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
689
|
-
end
|
690
|
-
|
691
|
-
# Extract the tool call information from the Anthropic tool call hash
|
692
|
-
#
|
693
|
-
# @param tool_call [Hash] The tool call hash, format: {"type"=>"tool_use", "id"=>"toolu_01TjusbFApEbwKPRWTRwzadR", "name"=>"news_retriever__get_top_headlines", "input"=>{"country"=>"us", "page_size"=>10}}], "stop_reason"=>"tool_use"}
|
694
|
-
# @return [Array] The tool call information
|
695
|
-
def extract_tool_call_args(tool_call:)
|
696
|
-
tool_call_id = tool_call.dig("id")
|
697
|
-
function_name = tool_call.dig("name")
|
698
|
-
tool_name, method_name = function_name.split("__")
|
699
|
-
tool_arguments = tool_call.dig("input").transform_keys(&:to_sym)
|
700
|
-
[tool_call_id, tool_name, method_name, tool_arguments]
|
701
|
-
end
|
702
|
-
|
703
|
-
def build_tools(tools)
|
704
|
-
tools.map { |tool| tool.class.function_schemas.to_anthropic_format }.flatten
|
705
|
-
end
|
706
|
-
|
707
|
-
def allowed_tool_choices
|
708
|
-
["auto", "any"]
|
709
|
-
end
|
710
|
-
|
711
|
-
def available_tool_names(tools)
|
712
|
-
build_tools(tools).map { |tool| tool.dig(:name) }
|
713
|
-
end
|
714
|
-
|
715
|
-
private
|
716
|
-
|
717
|
-
def build_tool_choice(choice)
|
718
|
-
case choice
|
719
|
-
when "auto"
|
720
|
-
{type: "auto"}
|
721
|
-
when "any"
|
722
|
-
{type: "any"}
|
723
|
-
else
|
724
|
-
{type: "tool", name: choice}
|
725
|
-
end
|
726
|
-
end
|
727
|
-
end
|
728
|
-
end
|
729
|
-
end
|
730
417
|
end
|
731
418
|
end
|
@@ -0,0 +1,27 @@
|
|
1
|
+
Dir[Pathname.new(__FILE__).dirname.join("adapters", "*.rb")].sort.each { |file| require file }
|
2
|
+
|
3
|
+
module Langchain
|
4
|
+
class Assistant
|
5
|
+
module LLM
|
6
|
+
# TODO: Fix the message truncation when context window is exceeded
|
7
|
+
class Adapter
|
8
|
+
def self.build(llm)
|
9
|
+
case llm
|
10
|
+
when Langchain::LLM::Anthropic
|
11
|
+
LLM::Adapters::Anthropic.new
|
12
|
+
when Langchain::LLM::GoogleGemini, Langchain::LLM::GoogleVertexAI
|
13
|
+
LLM::Adapters::GoogleGemini.new
|
14
|
+
when Langchain::LLM::MistralAI
|
15
|
+
LLM::Adapters::MistralAI.new
|
16
|
+
when Langchain::LLM::Ollama
|
17
|
+
LLM::Adapters::Ollama.new
|
18
|
+
when Langchain::LLM::OpenAI
|
19
|
+
LLM::Adapters::OpenAI.new
|
20
|
+
else
|
21
|
+
raise ArgumentError, "Unsupported LLM type: #{llm.class}"
|
22
|
+
end
|
23
|
+
end
|
24
|
+
end
|
25
|
+
end
|
26
|
+
end
|
27
|
+
end
|
@@ -0,0 +1,21 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
raise NotImplementedError, "Subclasses must implement build_chat_params"
|
8
|
+
end
|
9
|
+
|
10
|
+
def extract_tool_call_args(tool_call:)
|
11
|
+
raise NotImplementedError, "Subclasses must implement extract_tool_call_args"
|
12
|
+
end
|
13
|
+
|
14
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
15
|
+
raise NotImplementedError, "Subclasses must implement build_message"
|
16
|
+
end
|
17
|
+
end
|
18
|
+
end
|
19
|
+
end
|
20
|
+
end
|
21
|
+
end
|
@@ -0,0 +1,62 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class Anthropic < Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
params = {messages: messages}
|
8
|
+
if tools.any?
|
9
|
+
params[:tools] = build_tools(tools)
|
10
|
+
params[:tool_choice] = build_tool_choice(tool_choice)
|
11
|
+
end
|
12
|
+
params[:system] = instructions if instructions
|
13
|
+
params
|
14
|
+
end
|
15
|
+
|
16
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
17
|
+
warn "Image URL is not supported by Anthropic currently" if image_url
|
18
|
+
|
19
|
+
Langchain::Messages::AnthropicMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
20
|
+
end
|
21
|
+
|
22
|
+
# Extract the tool call information from the Anthropic tool call hash
|
23
|
+
#
|
24
|
+
# @param tool_call [Hash] The tool call hash, format: {"type"=>"tool_use", "id"=>"toolu_01TjusbFApEbwKPRWTRwzadR", "name"=>"news_retriever__get_top_headlines", "input"=>{"country"=>"us", "page_size"=>10}}], "stop_reason"=>"tool_use"}
|
25
|
+
# @return [Array] The tool call information
|
26
|
+
def extract_tool_call_args(tool_call:)
|
27
|
+
tool_call_id = tool_call.dig("id")
|
28
|
+
function_name = tool_call.dig("name")
|
29
|
+
tool_name, method_name = function_name.split("__")
|
30
|
+
tool_arguments = tool_call.dig("input").transform_keys(&:to_sym)
|
31
|
+
[tool_call_id, tool_name, method_name, tool_arguments]
|
32
|
+
end
|
33
|
+
|
34
|
+
def build_tools(tools)
|
35
|
+
tools.map { |tool| tool.class.function_schemas.to_anthropic_format }.flatten
|
36
|
+
end
|
37
|
+
|
38
|
+
def allowed_tool_choices
|
39
|
+
["auto", "any"]
|
40
|
+
end
|
41
|
+
|
42
|
+
def available_tool_names(tools)
|
43
|
+
build_tools(tools).map { |tool| tool.dig(:name) }
|
44
|
+
end
|
45
|
+
|
46
|
+
private
|
47
|
+
|
48
|
+
def build_tool_choice(choice)
|
49
|
+
case choice
|
50
|
+
when "auto"
|
51
|
+
{type: "auto"}
|
52
|
+
when "any"
|
53
|
+
{type: "any"}
|
54
|
+
else
|
55
|
+
{type: "tool", name: choice}
|
56
|
+
end
|
57
|
+
end
|
58
|
+
end
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
62
|
+
end
|
@@ -0,0 +1,62 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class GoogleGemini < Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
params = {messages: messages}
|
8
|
+
if tools.any?
|
9
|
+
params[:tools] = build_tools(tools)
|
10
|
+
params[:system] = instructions if instructions
|
11
|
+
params[:tool_choice] = build_tool_config(tool_choice)
|
12
|
+
end
|
13
|
+
params
|
14
|
+
end
|
15
|
+
|
16
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
17
|
+
warn "Image URL is not supported by Google Gemini" if image_url
|
18
|
+
|
19
|
+
Langchain::Messages::GoogleGeminiMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
20
|
+
end
|
21
|
+
|
22
|
+
# Extract the tool call information from the Google Gemini tool call hash
|
23
|
+
#
|
24
|
+
# @param tool_call [Hash] The tool call hash, format: {"functionCall"=>{"name"=>"weather__execute", "args"=>{"input"=>"NYC"}}}
|
25
|
+
# @return [Array] The tool call information
|
26
|
+
def extract_tool_call_args(tool_call:)
|
27
|
+
tool_call_id = tool_call.dig("functionCall", "name")
|
28
|
+
function_name = tool_call.dig("functionCall", "name")
|
29
|
+
tool_name, method_name = function_name.split("__")
|
30
|
+
tool_arguments = tool_call.dig("functionCall", "args").transform_keys(&:to_sym)
|
31
|
+
[tool_call_id, tool_name, method_name, tool_arguments]
|
32
|
+
end
|
33
|
+
|
34
|
+
def build_tools(tools)
|
35
|
+
tools.map { |tool| tool.class.function_schemas.to_google_gemini_format }.flatten
|
36
|
+
end
|
37
|
+
|
38
|
+
def allowed_tool_choices
|
39
|
+
["auto", "none"]
|
40
|
+
end
|
41
|
+
|
42
|
+
def available_tool_names(tools)
|
43
|
+
build_tools(tools).map { |tool| tool.dig(:name) }
|
44
|
+
end
|
45
|
+
|
46
|
+
private
|
47
|
+
|
48
|
+
def build_tool_config(choice)
|
49
|
+
case choice
|
50
|
+
when "auto"
|
51
|
+
{function_calling_config: {mode: "auto"}}
|
52
|
+
when "none"
|
53
|
+
{function_calling_config: {mode: "none"}}
|
54
|
+
else
|
55
|
+
{function_calling_config: {mode: "any", allowed_function_names: [choice]}}
|
56
|
+
end
|
57
|
+
end
|
58
|
+
end
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
62
|
+
end
|
@@ -0,0 +1,65 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class MistralAI < Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
params = {messages: messages}
|
8
|
+
if tools.any?
|
9
|
+
params[:tools] = build_tools(tools)
|
10
|
+
params[:tool_choice] = build_tool_choice(tool_choice)
|
11
|
+
end
|
12
|
+
params
|
13
|
+
end
|
14
|
+
|
15
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
16
|
+
Langchain::Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
17
|
+
end
|
18
|
+
|
19
|
+
# Extract the tool call information from the OpenAI tool call hash
|
20
|
+
#
|
21
|
+
# @param tool_call [Hash] The tool call hash
|
22
|
+
# @return [Array] The tool call information
|
23
|
+
def extract_tool_call_args(tool_call:)
|
24
|
+
tool_call_id = tool_call.dig("id")
|
25
|
+
|
26
|
+
function_name = tool_call.dig("function", "name")
|
27
|
+
tool_name, method_name = function_name.split("__")
|
28
|
+
|
29
|
+
tool_arguments = tool_call.dig("function", "arguments")
|
30
|
+
tool_arguments = if tool_arguments.is_a?(Hash)
|
31
|
+
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
32
|
+
else
|
33
|
+
JSON.parse(tool_arguments, symbolize_names: true)
|
34
|
+
end
|
35
|
+
|
36
|
+
[tool_call_id, tool_name, method_name, tool_arguments]
|
37
|
+
end
|
38
|
+
|
39
|
+
def build_tools(tools)
|
40
|
+
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
41
|
+
end
|
42
|
+
|
43
|
+
def allowed_tool_choices
|
44
|
+
["auto", "none"]
|
45
|
+
end
|
46
|
+
|
47
|
+
def available_tool_names(tools)
|
48
|
+
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
49
|
+
end
|
50
|
+
|
51
|
+
private
|
52
|
+
|
53
|
+
def build_tool_choice(choice)
|
54
|
+
case choice
|
55
|
+
when "auto"
|
56
|
+
choice
|
57
|
+
else
|
58
|
+
{"type" => "function", "function" => {"name" => choice}}
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
62
|
+
end
|
63
|
+
end
|
64
|
+
end
|
65
|
+
end
|
@@ -0,0 +1,57 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class Ollama < Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
params = {messages: messages}
|
8
|
+
if tools.any?
|
9
|
+
params[:tools] = build_tools(tools)
|
10
|
+
end
|
11
|
+
params
|
12
|
+
end
|
13
|
+
|
14
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
15
|
+
warn "Image URL is not supported by Ollama currently" if image_url
|
16
|
+
|
17
|
+
Langchain::Messages::OllamaMessage.new(role: role, content: content, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
18
|
+
end
|
19
|
+
|
20
|
+
# Extract the tool call information from the OpenAI tool call hash
|
21
|
+
#
|
22
|
+
# @param tool_call [Hash] The tool call hash
|
23
|
+
# @return [Array] The tool call information
|
24
|
+
def extract_tool_call_args(tool_call:)
|
25
|
+
tool_call_id = tool_call.dig("id")
|
26
|
+
|
27
|
+
function_name = tool_call.dig("function", "name")
|
28
|
+
tool_name, method_name = function_name.split("__")
|
29
|
+
|
30
|
+
tool_arguments = tool_call.dig("function", "arguments")
|
31
|
+
tool_arguments = if tool_arguments.is_a?(Hash)
|
32
|
+
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
33
|
+
else
|
34
|
+
JSON.parse(tool_arguments, symbolize_names: true)
|
35
|
+
end
|
36
|
+
|
37
|
+
[tool_call_id, tool_name, method_name, tool_arguments]
|
38
|
+
end
|
39
|
+
|
40
|
+
def available_tool_names(tools)
|
41
|
+
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
42
|
+
end
|
43
|
+
|
44
|
+
def allowed_tool_choices
|
45
|
+
["auto", "none"]
|
46
|
+
end
|
47
|
+
|
48
|
+
private
|
49
|
+
|
50
|
+
def build_tools(tools)
|
51
|
+
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
52
|
+
end
|
53
|
+
end
|
54
|
+
end
|
55
|
+
end
|
56
|
+
end
|
57
|
+
end
|
@@ -0,0 +1,65 @@
|
|
1
|
+
module Langchain
|
2
|
+
class Assistant
|
3
|
+
module LLM
|
4
|
+
module Adapters
|
5
|
+
class OpenAI < Base
|
6
|
+
def build_chat_params(tools:, instructions:, messages:, tool_choice:)
|
7
|
+
params = {messages: messages}
|
8
|
+
if tools.any?
|
9
|
+
params[:tools] = build_tools(tools)
|
10
|
+
params[:tool_choice] = build_tool_choice(tool_choice)
|
11
|
+
end
|
12
|
+
params
|
13
|
+
end
|
14
|
+
|
15
|
+
def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)
|
16
|
+
Langchain::Messages::OpenAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)
|
17
|
+
end
|
18
|
+
|
19
|
+
# Extract the tool call information from the OpenAI tool call hash
|
20
|
+
#
|
21
|
+
# @param tool_call [Hash] The tool call hash
|
22
|
+
# @return [Array] The tool call information
|
23
|
+
def extract_tool_call_args(tool_call:)
|
24
|
+
tool_call_id = tool_call.dig("id")
|
25
|
+
|
26
|
+
function_name = tool_call.dig("function", "name")
|
27
|
+
tool_name, method_name = function_name.split("__")
|
28
|
+
|
29
|
+
tool_arguments = tool_call.dig("function", "arguments")
|
30
|
+
tool_arguments = if tool_arguments.is_a?(Hash)
|
31
|
+
Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)
|
32
|
+
else
|
33
|
+
JSON.parse(tool_arguments, symbolize_names: true)
|
34
|
+
end
|
35
|
+
|
36
|
+
[tool_call_id, tool_name, method_name, tool_arguments]
|
37
|
+
end
|
38
|
+
|
39
|
+
def build_tools(tools)
|
40
|
+
tools.map { |tool| tool.class.function_schemas.to_openai_format }.flatten
|
41
|
+
end
|
42
|
+
|
43
|
+
def allowed_tool_choices
|
44
|
+
["auto", "none"]
|
45
|
+
end
|
46
|
+
|
47
|
+
def available_tool_names(tools)
|
48
|
+
build_tools(tools).map { |tool| tool.dig(:function, :name) }
|
49
|
+
end
|
50
|
+
|
51
|
+
private
|
52
|
+
|
53
|
+
def build_tool_choice(choice)
|
54
|
+
case choice
|
55
|
+
when "auto"
|
56
|
+
choice
|
57
|
+
else
|
58
|
+
{"type" => "function", "function" => {"name" => choice}}
|
59
|
+
end
|
60
|
+
end
|
61
|
+
end
|
62
|
+
end
|
63
|
+
end
|
64
|
+
end
|
65
|
+
end
|
@@ -14,7 +14,7 @@ module Langchain::LLM
|
|
14
14
|
DEFAULTS = {
|
15
15
|
temperature: 0.0,
|
16
16
|
completion_model_name: "claude-2.1",
|
17
|
-
chat_completion_model_name: "claude-3-sonnet-20240229",
|
17
|
+
chat_completion_model_name: "claude-3-5-sonnet-20240620",
|
18
18
|
max_tokens_to_sample: 256
|
19
19
|
}.freeze
|
20
20
|
|
@@ -109,6 +109,7 @@ module Langchain::LLM
|
|
109
109
|
raise ArgumentError.new("model argument is required") if parameters[:model].empty?
|
110
110
|
raise ArgumentError.new("max_tokens argument is required") if parameters[:max_tokens].nil?
|
111
111
|
|
112
|
+
binding.pry
|
112
113
|
response = client.messages(parameters: parameters)
|
113
114
|
|
114
115
|
Langchain::LLM::AnthropicResponse.new(response)
|
@@ -122,7 +122,7 @@ module Langchain::Tool
|
|
122
122
|
end
|
123
123
|
db.foreign_key_list(table).each do |fk|
|
124
124
|
schema << ",\n" if fk == db.foreign_key_list(table).first
|
125
|
-
schema << "FOREIGN KEY (#{fk[:columns][0]}) REFERENCES #{fk[:table]}(#{fk[:key][0]})"
|
125
|
+
schema << "FOREIGN KEY (#{fk[:columns]&.first}) REFERENCES #{fk[:table]}(#{fk[:key]&.first})"
|
126
126
|
schema << ",\n" unless fk == db.foreign_key_list(table).last
|
127
127
|
end
|
128
128
|
schema << ");\n"
|
data/lib/langchain/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: langchainrb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.17.0
|
4
|
+
version: 0.17.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Andrei Bondarev
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-10-02 00:00:00.000000000 Z
|
11
|
+
date: 2024-10-07 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: baran
|
@@ -638,6 +638,13 @@ files:
|
|
638
638
|
- README.md
|
639
639
|
- lib/langchain.rb
|
640
640
|
- lib/langchain/assistants/assistant.rb
|
641
|
+
- lib/langchain/assistants/llm/adapter.rb
|
642
|
+
- lib/langchain/assistants/llm/adapters/_base.rb
|
643
|
+
- lib/langchain/assistants/llm/adapters/anthropic.rb
|
644
|
+
- lib/langchain/assistants/llm/adapters/google_gemini.rb
|
645
|
+
- lib/langchain/assistants/llm/adapters/mistral_ai.rb
|
646
|
+
- lib/langchain/assistants/llm/adapters/ollama.rb
|
647
|
+
- lib/langchain/assistants/llm/adapters/openai.rb
|
641
648
|
- lib/langchain/assistants/messages/anthropic_message.rb
|
642
649
|
- lib/langchain/assistants/messages/base.rb
|
643
650
|
- lib/langchain/assistants/messages/google_gemini_message.rb
|