langchainrb 0.19.5 → 1.19.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langchainrb might be problematic.
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -18
- data/README.md +13 -9
- data/lib/langchain/assistant/llm/adapters/openai.rb +1 -3
- data/lib/langchain/assistant/messages/anthropic_message.rb +6 -6
- data/lib/langchain/assistant.rb +7 -21
- data/lib/langchain/llm/ai21.rb +0 -2
- data/lib/langchain/llm/anthropic.rb +4 -11
- data/lib/langchain/llm/aws_bedrock.rb +1 -1
- data/lib/langchain/llm/base.rb +2 -0
- data/lib/langchain/llm/llama_cpp.rb +0 -2
- data/lib/langchain/llm/ollama.rb +3 -4
- data/lib/langchain/llm/openai.rb +2 -1
- data/lib/langchain/llm/parameters/chat.rb +0 -1
- data/lib/langchain/output_parsers/base.rb +11 -0
- data/lib/langchain/tool/calculator.rb +3 -4
- data/lib/langchain/tool/database.rb +11 -17
- data/lib/langchain/tool/file_system.rb +5 -6
- data/lib/langchain/tool/google_search.rb +15 -15
- data/lib/langchain/tool/news_retriever.rb +6 -9
- data/lib/langchain/tool/ruby_code_interpreter.rb +3 -3
- data/lib/langchain/tool/tavily.rb +2 -2
- data/lib/langchain/tool/vectorsearch.rb +1 -3
- data/lib/langchain/tool/weather.rb +3 -3
- data/lib/langchain/tool/wikipedia.rb +2 -2
- data/lib/langchain/tool_definition.rb +0 -14
- data/lib/langchain/vectorsearch/base.rb +2 -0
- data/lib/langchain/vectorsearch/chroma.rb +1 -2
- data/lib/langchain/vectorsearch/elasticsearch.rb +1 -1
- data/lib/langchain/vectorsearch/epsilla.rb +0 -2
- data/lib/langchain/vectorsearch/milvus.rb +0 -1
- data/lib/langchain/vectorsearch/pgvector.rb +11 -35
- data/lib/langchain/version.rb +1 -1
- data/lib/langchain.rb +4 -0
- metadata +47 -38
- data/lib/langchain/output_parsers/output_parser_exception.rb +0 -10
- data/lib/langchain/tool_response.rb +0 -24
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7e15dfbab8b4380ef4c71d1b596ed5f19ff55feefb01cf091903c792ab66af5f
+  data.tar.gz: de4d29455f1e29b34f04026868287c4464f94e45d15f6113705746b421b72fb5
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 88e052d2a3a8ca936ed0c8d6d638351fdf7418e91e33756d0295039423de28fc0e25273c5e8afed94792ff464f17f883904f472464185db40242476e8530c6d4
+  data.tar.gz: c9cc9d7b06b1b329ca67973009e5248c688de972271cc77d8a946c00fa0bc4501f9913159bafc068ee8b3468eea47d5c3860c12bc4fbb46adbf44aedc7816531
data/CHANGELOG.md
CHANGED
@@ -9,24 +9,9 @@
 - [DOCS]: Documentation changes. No changes to the library's behavior.
 - [SECURITY]: A change which fixes a security vulnerability.
 
-## [
-
-
-- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/953] Handle nil response in OpenAI LLM streaming
-- [BREAKING] [https://github.com/patterns-ai-core/langchainrb/pull/956] Deprecate `Langchain::Vectorsearch::Epsilla` class
-- [BREAKING] [https://github.com/patterns-ai-core/langchainrb/pull/961] Deprecate `Langchain::LLM::LlamaCpp` class
-- [BREAKING] [https://github.com/patterns-ai-core/langchainrb/pull/962] Deprecate `Langchain::LLM::AI21` class
-- [BREAKING] [https://github.com/patterns-ai-core/langchainrb/pull/971] Exclude `temperature` from being automatically added to OpenAI LLM parameters
-- [OPTIM] [https://github.com/patterns-ai-core/langchainrb/pull/977] Enable `Langchain::Tool::RubyCodeInterpreter` on Ruby 3.3+
-
-## [0.19.4] - 2025-02-17
-- [BREAKING] [https://github.com/patterns-ai-core/langchainrb/pull/894] Tools can now output image_urls, and all tool output must be wrapped by a tool_response() method
-- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/921] Fix for Assistant when OpenAI o1/o3 models are used
-
-## [0.19.3] - 2025-01-13
-- [BUGFIX] [https://github.com/patterns-ai-core/langchainrb/pull/900] Empty text content should not be set when content is nil when using AnthropicMessage
-
-## [0.19.2] - 2024-11-26
+## [Unreleased]
+
+## [0.19.2] - 2024-11-25
 - [FEATURE] [https://github.com/patterns-ai-core/langchainrb/pull/884] Add `tool_execution_callback` to `Langchain::Assistant`, a callback function (proc, lambda) that is called right before a tool is executed
 
 ## [0.19.1] - 2024-11-21
data/README.md
CHANGED
@@ -2,14 +2,14 @@
 ---
 ⚡ Building LLM-powered applications in Ruby ⚡
 
-For deep Rails integration see: [langchainrb_rails](https://github.com/
+For deep Rails integration see: [langchainrb_rails](https://github.com/andreibondarev/langchainrb_rails) gem.
 
 Available for paid consulting engagements! [Email me](mailto:andrei@sourcelabs.io).
 
-
 [](https://badge.fury.io/rb/langchainrb)
 [](http://rubydoc.info/gems/langchainrb)
-[](https://github.com/
+[](https://github.com/andreibondarev/langchainrb/blob/main/LICENSE.txt)
 [](https://discord.gg/WDARp7J2n8)
 [](https://twitter.com/rushing_andrei)
 
@@ -57,6 +57,7 @@ The `Langchain::LLM` module provides a unified interface for interacting with va
 
 ## Supported LLM Providers
 
+- AI21
 - Anthropic
 - AWS Bedrock
 - Azure OpenAI
@@ -64,6 +65,7 @@ The `Langchain::LLM` module provides a unified interface for interacting with va
 - Google Gemini
 - Google Vertex AI
 - HuggingFace
+- LlamaCpp
 - Mistral AI
 - Ollama
 - OpenAI
@@ -367,7 +369,7 @@ fix_parser = Langchain::OutputParsers::OutputFixingParser.from_llm(
 fix_parser.parse(llm_response)
 ```
 
-See [here](https://github.com/
+See [here](https://github.com/andreibondarev/langchainrb/tree/main/examples/create_and_manage_prompt_templates_using_structured_output_parser.rb) for a concrete example
 
 ## Building Retrieval Augment Generation (RAG) system
 RAG is a methodology that assists LLMs generate accurate and up-to-date information.
@@ -385,6 +387,7 @@ Langchain.rb provides a convenient unified interface on top of supported vectors
 | Database | Open-source | Cloud offering |
 | -------- |:------------------:| :------------: |
 | [Chroma](https://trychroma.com/?utm_source=langchainrb&utm_medium=github) | ✅ | ✅ |
+| [Epsilla](https://epsilla.com/?utm_source=langchainrb&utm_medium=github) | ✅ | ✅ |
 | [Hnswlib](https://github.com/nmslib/hnswlib/?utm_source=langchainrb&utm_medium=github) | ✅ | ❌ |
 | [Milvus](https://milvus.io/?utm_source=langchainrb&utm_medium=github) | ✅ | ✅ Zilliz Cloud |
 | [Pinecone](https://www.pinecone.io/?utm_source=langchainrb&utm_medium=github) | ❌ | ✅ |
@@ -417,6 +420,7 @@ client = Langchain::Vectorsearch::Weaviate.new(
 You can instantiate any other supported vector search database:
 ```ruby
 client = Langchain::Vectorsearch::Chroma.new(...) # `gem "chroma-db", "~> 0.6.0"`
+client = Langchain::Vectorsearch::Epsilla.new(...) # `gem "epsilla-ruby", "~> 0.0.3"`
 client = Langchain::Vectorsearch::Hnswlib.new(...) # `gem "hnswlib", "~> 0.8.1"`
 client = Langchain::Vectorsearch::Milvus.new(...) # `gem "milvus", "~> 0.9.3"`
 client = Langchain::Vectorsearch::Pinecone.new(...) # `gem "pinecone", "~> 0.1.6"`
@@ -576,11 +580,11 @@ class MovieInfoTool
   end
 
   def search_movie(query:)
-
+    ...
   end
 
   def get_movie_details(movie_id:)
-
+    ...
   end
 end
 ```
@@ -635,7 +639,7 @@ ragas.score(answer: "", question: "", context: "")
 ```
 
 ## Examples
-Additional examples available: [/examples](https://github.com/
+Additional examples available: [/examples](https://github.com/andreibondarev/langchainrb/tree/main/examples)
 
 ## Logging
 
@@ -661,7 +665,7 @@ gem install unicode -- --with-cflags="-Wno-incompatible-function-pointer-types"
 
 ## Development
 
-1. `git clone https://github.com/
+1. `git clone https://github.com/andreibondarev/langchainrb.git`
 2. `cp .env.example .env`, then fill out the environment variables in `.env`
 3. `bundle exec rake` to ensure that the tests pass and to run standardrb
 4. `bin/console` to load the gem in a REPL session. Feel free to add your own instances of LLMs, Tools, Agents, etc. and experiment with them.
@@ -676,7 +680,7 @@ Join us in the [Langchain.rb](https://discord.gg/WDARp7J2n8) Discord server.
 
 ## Contributing
 
-Bug reports and pull requests are welcome on GitHub at https://github.com/
+Bug reports and pull requests are welcome on GitHub at https://github.com/andreibondarev/langchainrb.
 
 ## License
 
data/lib/langchain/assistant/llm/adapters/openai.rb
CHANGED
@@ -24,9 +24,7 @@ module Langchain
         if tools.any?
           params[:tools] = build_tools(tools)
           params[:tool_choice] = build_tool_choice(tool_choice)
-
-          # Set `Assistant.new(parallel_tool_calls: nil, ...)` to avoid the error.
-          params[:parallel_tool_calls] = parallel_tool_calls unless parallel_tool_calls.nil?
+          params[:parallel_tool_calls] = parallel_tool_calls
         end
         params
       end
data/lib/langchain/assistant/messages/anthropic_message.rb
CHANGED
@@ -53,14 +53,14 @@ module Langchain
       #
       # @return [Hash] The message as an Anthropic API-compatible hash, with the role as "assistant"
       def assistant_hash
-        content_array = []
-        if content && !content.empty?
-          content_array << {type: "text", text: content}
-        end
-
         {
           role: "assistant",
-          content:
+          content: [
+            {
+              type: "text",
+              text: content
+            }
+          ].concat(tool_calls)
         }
       end
 
data/lib/langchain/assistant.rb
CHANGED
@@ -354,30 +354,16 @@ module Langchain
     def run_tools(tool_calls)
       # Iterate over each function invocation and submit tool output
       tool_calls.each do |tool_call|
-
-      end
-    end
-
-    # Run the tool call
-    #
-    # @param tool_call [Hash] The tool call to run
-    # @return [Object] The result of the tool call
-    def run_tool(tool_call)
-      tool_call_id, tool_name, method_name, tool_arguments = @llm_adapter.extract_tool_call_args(tool_call: tool_call)
-
-      tool_instance = tools.find do |t|
-        t.class.tool_name == tool_name
-      end or raise ArgumentError, "Tool: #{tool_name} not found in assistant.tools"
+        tool_call_id, tool_name, method_name, tool_arguments = @llm_adapter.extract_tool_call_args(tool_call: tool_call)
 
-
-
+        tool_instance = tools.find do |t|
+          t.class.tool_name == tool_name
+        end or raise ArgumentError, "Tool: #{tool_name} not found in assistant.tools"
 
-
+        # Call the callback if set
+        tool_execution_callback.call(tool_call_id, tool_name, method_name, tool_arguments) if tool_execution_callback # rubocop:disable Style/SafeNavigation
+        output = tool_instance.send(method_name, **tool_arguments)
 
-        # Handle both ToolResponse and legacy return values
-        if output.is_a?(ToolResponse)
-          add_message(role: @llm_adapter.tool_role, content: output.content, image_url: output.image_url, tool_call_id: tool_call_id)
-        else
         submit_tool_output(tool_call_id: tool_call_id, output: output)
       end
     end
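Illustrative note (not part of the diff): on both sides of this hunk the assistant invokes `tool_execution_callback` with the same four arguments right before a tool runs, matching the `tool_execution_callback` feature listed in the 0.19.2 changelog entry above. A minimal, hypothetical sketch of wiring up that callback, assuming an already configured `llm`:

    # Hedged sketch; the lambda arguments mirror the call site shown in the hunk.
    assistant = Langchain::Assistant.new(
      llm: llm,
      tools: [Langchain::Tool::Calculator.new],
      tool_execution_callback: ->(tool_call_id, tool_name, method_name, tool_arguments) do
        Langchain.logger.info("Running #{tool_name}##{method_name} (#{tool_call_id}) with #{tool_arguments.inspect}")
      end
    )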
data/lib/langchain/llm/ai21.rb
CHANGED
@@ -17,8 +17,6 @@ module Langchain::LLM
     }.freeze
 
     def initialize(api_key:, default_options: {})
-      Langchain.logger.warn "DEPRECATED: `Langchain::LLM::AI21` is deprecated, and will be removed in the next major version. Please use another LLM provider."
-
       depends_on "ai21"
 
       @client = ::AI21::Client.new(api_key)
data/lib/langchain/llm/anthropic.rb
CHANGED
@@ -22,15 +22,10 @@ module Langchain::LLM
     #
     # @param api_key [String] The API key to use
     # @param llm_options [Hash] Options to pass to the Anthropic client
-    # @param default_options [Hash] Default options to use on every call to LLM, e.g.: { temperature:, completion_model:, chat_model:, max_tokens
+    # @param default_options [Hash] Default options to use on every call to LLM, e.g.: { temperature:, completion_model:, chat_model:, max_tokens: }
     # @return [Langchain::LLM::Anthropic] Langchain::LLM::Anthropic instance
     def initialize(api_key:, llm_options: {}, default_options: {})
-
-        depends_on "ruby-anthropic", req: "anthropic"
-      rescue Langchain::DependencyHelper::LoadError
-        # Falls back to the older `anthropic` gem if `ruby-anthropic` gem cannot be loaded.
-        depends_on "anthropic"
-      end
+      depends_on "anthropic"
 
       @client = ::Anthropic::Client.new(access_token: api_key, **llm_options)
       @defaults = DEFAULTS.merge(default_options)
@@ -39,8 +34,7 @@ module Langchain::LLM
         temperature: {default: @defaults[:temperature]},
         max_tokens: {default: @defaults[:max_tokens]},
         metadata: {},
-        system: {}
-        thinking: {default: @defaults[:thinking]}
+        system: {}
       )
       chat_parameters.ignore(:n, :user)
       chat_parameters.remap(stop: :stop_sequences)
@@ -103,7 +97,6 @@ module Langchain::LLM
     # @option params [String] :system System prompt
     # @option params [Float] :temperature Amount of randomness injected into the response
     # @option params [Array<String>] :tools Definitions of tools that the model may use
-    # @option params [Hash] :thinking Enable extended thinking mode, e.g. { type: "enabled", budget_tokens: 4000 }
     # @option params [Integer] :top_k Only sample from the top K options for each subsequent token
     # @option params [Float] :top_p Use nucleus sampling.
     # @return [Langchain::LLM::AnthropicResponse] The chat completion
@@ -177,7 +170,7 @@ module Langchain::LLM
           "id" => first_block.dig("content_block", "id"),
           "type" => "tool_use",
           "name" => first_block.dig("content_block", "name"),
-          "input" =>
+          "input" => JSON.parse(input).transform_keys(&:to_sym)
         }
       end.compact
     end
data/lib/langchain/llm/aws_bedrock.rb
CHANGED
@@ -167,7 +167,7 @@ module Langchain::LLM
 
     def parse_model_id(model_id)
       model_id
-        .gsub(
+        .gsub("us.", "") # Meta append "us." to their model ids
         .split(".")
     end
 
data/lib/langchain/llm/base.rb
CHANGED
@@ -7,12 +7,14 @@ module Langchain::LLM
   #
   # Langchain.rb provides a common interface to interact with all supported LLMs:
   #
+  # - {Langchain::LLM::AI21}
   # - {Langchain::LLM::Anthropic}
   # - {Langchain::LLM::Azure}
   # - {Langchain::LLM::Cohere}
   # - {Langchain::LLM::GoogleGemini}
   # - {Langchain::LLM::GoogleVertexAI}
   # - {Langchain::LLM::HuggingFace}
+  # - {Langchain::LLM::LlamaCpp}
   # - {Langchain::LLM::OpenAI}
   # - {Langchain::LLM::Replicate}
   #
data/lib/langchain/llm/llama_cpp.rb
CHANGED
@@ -23,8 +23,6 @@ module Langchain::LLM
     # @param n_threads [Integer] The CPU number of threads to use
     # @param seed [Integer] The seed to use
     def initialize(model_path:, n_gpu_layers: 1, n_ctx: 2048, n_threads: 1, seed: 0)
-      Langchain.logger.warn "DEPRECATED: `Langchain::LLM::LlamaCpp` is deprecated, and will be removed in the next major version. Please use `Langchain::LLM::Ollama` for self-hosted LLM inference."
-
       depends_on "llama_cpp"
 
       @model_path = model_path
data/lib/langchain/llm/ollama.rb
CHANGED
@@ -12,9 +12,9 @@ module Langchain::LLM
 
     DEFAULTS = {
       temperature: 0.0,
-      completion_model: "llama3.
-      embedding_model: "llama3.
-      chat_model: "llama3.
+      completion_model: "llama3.1",
+      embedding_model: "llama3.1",
+      chat_model: "llama3.1",
       options: {}
     }.freeze
 
@@ -24,7 +24,6 @@ module Langchain::LLM
       llama2: 4_096,
       llama3: 4_096,
       "llama3.1": 4_096,
-      "llama3.2": 4_096,
       llava: 4_096,
       mistral: 4_096,
       "mistral-openorca": 4_096,
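Side note (illustration, not from the diff): because the default completion/embedding/chat models differ between the two versions, callers that care about the model can pin it explicitly through `default_options`. The sketch below assumes the gem's usual `url:`/`default_options:` keywords and a local Ollama server:

    # Hypothetical usage sketch: pin the model instead of relying on the default.
    llm = Langchain::LLM::Ollama.new(
      url: "http://localhost:11434",
      default_options: {chat_model: "llama3.1", completion_model: "llama3.1", embedding_model: "llama3.1"}
    )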
data/lib/langchain/llm/openai.rb
CHANGED
@@ -15,6 +15,7 @@ module Langchain::LLM
   class OpenAI < Base
     DEFAULTS = {
       n: 1,
+      temperature: 0.0,
       chat_model: "gpt-4o-mini",
       embedding_model: "text-embedding-3-small"
     }.freeze
@@ -172,7 +173,7 @@ module Langchain::LLM
 
     def with_api_error_handling
       response = yield
-      return if response.
+      return if response.empty?
 
       raise Langchain::LLM::ApiError.new "OpenAI API error: #{response.dig("error", "message")}" if response&.dig("error")
 
data/lib/langchain/output_parsers/base.rb
CHANGED
@@ -28,4 +28,15 @@ module Langchain::OutputParsers
       raise NotImplementedError
     end
   end
+
+  class OutputParserException < StandardError
+    def initialize(message, text)
+      @message = message
+      @text = text
+    end
+
+    def to_s
+      "#{@message}\nText: #{@text}"
+    end
+  end
 end
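Illustrative note: the exception class added back into `base.rb` above stores the failure message together with the raw LLM text and joins them in `to_s`, so callers can log both. A hedged sketch of handling a parse failure, assuming `parser` is a `Langchain::OutputParsers::StructuredOutputParser` and `llm_response` is the raw model output:

    begin
      parser.parse(llm_response)
    rescue Langchain::OutputParsers::OutputParserException => e
      # e.to_s => "<message>\nText: <raw llm text>"
      Langchain.logger.error(e.to_s)
    end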
data/lib/langchain/tool/calculator.rb
CHANGED
@@ -26,14 +26,13 @@ module Langchain::Tool
     # Evaluates a pure math expression or if equation contains non-math characters (e.g.: "12F in Celsius") then it uses the google search calculator to evaluate the expression
     #
     # @param input [String] math expression
-    # @return [
+    # @return [String] Answer
     def execute(input:)
       Langchain.logger.debug("#{self.class} - Executing \"#{input}\"")
 
-
-      tool_response(content: result)
+      Eqn::Calculator.calc(input)
     rescue Eqn::ParseError, Eqn::NoVariableValueError
-
+      "\"#{input}\" is an invalid mathematical expression"
     end
   end
 end
data/lib/langchain/tool/database.rb
CHANGED
@@ -5,7 +5,7 @@ module Langchain::Tool
   # Connects to a SQL database, executes SQL queries, and outputs DB schema for Agents to use
   #
   # Gem requirements:
-  #     gem "sequel", "~> 5.
+  #     gem "sequel", "~> 5.68.0"
   #
   # Usage:
   #    database = Langchain::Tool::Database.new(connection_string: "postgres://user:password@localhost:5432/db_name")
@@ -49,53 +49,50 @@ module Langchain::Tool
 
     # Database Tool: Returns a list of tables in the database
     #
-    # @return [
+    # @return [Array<Symbol>] List of tables in the database
     def list_tables
-
+      db.tables
     end
 
     # Database Tool: Returns the schema for a list of tables
     #
     # @param tables [Array<String>] The tables to describe.
-    # @return [
+    # @return [String] The schema for the tables
    def describe_tables(tables: [])
       return "No tables specified" if tables.empty?
 
       Langchain.logger.debug("#{self.class} - Describing tables: #{tables}")
 
-
+      tables
         .map do |table|
          describe_table(table)
        end
        .join("\n")
-
-      tool_response(content: result)
     end
 
     # Database Tool: Returns the database schema
     #
-    # @return [
+    # @return [String] Database schema
     def dump_schema
       Langchain.logger.debug("#{self.class} - Dumping schema tables and keys")
 
       schemas = db.tables.map do |table|
         describe_table(table)
       end
-
-      tool_response(content: schemas.join("\n"))
+      schemas.join("\n")
     end
 
     # Database Tool: Executes a SQL query and returns the results
     #
     # @param input [String] SQL query to be executed
-    # @return [
+    # @return [Array] Results from the SQL query
     def execute(input:)
       Langchain.logger.debug("#{self.class} - Executing \"#{input}\"")
 
-
+      db[input].to_a
     rescue Sequel::DatabaseError => e
       Langchain.logger.error("#{self.class} - #{e.message}")
-
+      e.message # Return error to LLM
     end
 
     private
@@ -103,7 +100,7 @@ module Langchain::Tool
     # Describes a table and its schema
     #
     # @param table [String] The table to describe
-    # @return [
+    # @return [String] The schema for the table
     def describe_table(table)
       # TODO: There's probably a clear way to do all of this below
 
@@ -118,7 +115,6 @@ module Langchain::Tool
         else
           primary_key_columns << column[0]
         end
-        schema << " COMMENT '#{column[1][:comment]}'" if column[1][:comment]
         schema << ",\n" unless column == db.schema(table).last && primary_key_column_count == 1
       end
       if primary_key_column_count > 1
@@ -130,8 +126,6 @@ module Langchain::Tool
         schema << ",\n" unless fk == db.foreign_key_list(table).last
       end
       schema << ");\n"
-
-      tool_response(content: schema)
     end
   end
 end
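Illustrative note: the usage comment in the hunk above shows how the tool is constructed; on the 1.19.2 side the methods return plain Ruby values (an Array of table names, a schema String, query rows, or the error message String), whereas the 0.19.5 side wraps them with `tool_response(...)`. A hypothetical sketch with made-up table names:

    database = Langchain::Tool::Database.new(connection_string: "postgres://user:password@localhost:5432/db_name")
    database.list_tables                                  # => e.g. [:users, :orders]
    database.describe_tables(tables: ["users"])           # => "CREATE TABLE users(...);"
    database.execute(input: "SELECT COUNT(*) FROM users") # => rows from Sequel, e.g. [{count: 42}]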
data/lib/langchain/tool/file_system.rb
CHANGED
@@ -24,22 +24,21 @@ module Langchain::Tool
     end
 
     def list_directory(directory_path:)
-
+      Dir.entries(directory_path)
     rescue Errno::ENOENT
-
+      "No such directory: #{directory_path}"
     end
 
     def read_file(file_path:)
-
+      File.read(file_path)
     rescue Errno::ENOENT
-
+      "No such file: #{file_path}"
     end
 
     def write_to_file(file_path:, content:)
       File.write(file_path, content)
-      tool_response(content: "File written successfully")
     rescue Errno::EACCES
-
+      "Permission denied: #{file_path}"
     end
   end
 end
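Illustrative note: as with the other tools in this release, these methods return plain Strings (including error messages) rather than `ToolResponse` objects. A small hypothetical sketch with made-up paths:

    fs = Langchain::Tool::FileSystem.new
    fs.read_file(file_path: "README.md")       # => file contents as a String
    fs.read_file(file_path: "missing.txt")     # => "No such file: missing.txt"
    fs.list_directory(directory_path: "lib")   # => Dir.entries("lib")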
data/lib/langchain/tool/google_search.rb
CHANGED
@@ -36,7 +36,7 @@ module Langchain::Tool
     # Executes Google Search and returns the result
     #
     # @param input [String] search query
-    # @return [
+    # @return [String] Answer
     def execute(input:)
       Langchain.logger.debug("#{self.class} - Executing \"#{input}\"")
 
@@ -44,31 +44,31 @@ module Langchain::Tool
 
       answer_box = results[:answer_box_list] ? results[:answer_box_list].first : results[:answer_box]
       if answer_box
-        return
+        return answer_box[:result] ||
           answer_box[:answer] ||
           answer_box[:snippet] ||
           answer_box[:snippet_highlighted_words] ||
-          answer_box.reject { |_k, v| v.is_a?(Hash) || v.is_a?(Array) || v.start_with?("http") }
+          answer_box.reject { |_k, v| v.is_a?(Hash) || v.is_a?(Array) || v.start_with?("http") }
       elsif (events_results = results[:events_results])
-        return
+        return events_results.take(10)
       elsif (sports_results = results[:sports_results])
-        return
+        return sports_results
       elsif (top_stories = results[:top_stories])
-        return
+        return top_stories
       elsif (news_results = results[:news_results])
-        return
+        return news_results
       elsif (jobs_results = results.dig(:jobs_results, :jobs))
-        return
+        return jobs_results
       elsif (shopping_results = results[:shopping_results]) && shopping_results.first.key?(:title)
-        return
+        return shopping_results.take(3)
       elsif (questions_and_answers = results[:questions_and_answers])
-        return
+        return questions_and_answers
       elsif (popular_destinations = results.dig(:popular_destinations, :destinations))
-        return
+        return popular_destinations
      elsif (top_sights = results.dig(:top_sights, :sights))
-        return
+        return top_sights
      elsif (images_results = results[:images_results]) && images_results.first.key?(:thumbnail)
-        return
+        return images_results.map { |h| h[:thumbnail] }.take(10)
       end
 
       snippets = []
@@ -110,8 +110,8 @@ module Langchain::Tool
         snippets << local_results
       end
 
-      return
-
+      return "No good search result found" if snippets.empty?
+      snippets
     end
 
     #
data/lib/langchain/tool/news_retriever.rb
CHANGED
@@ -57,7 +57,7 @@ module Langchain::Tool
     # @param page_size [Integer] The number of results to return per page. 20 is the API's default, 100 is the maximum. Our default is 5.
     # @param page [Integer] Use this to page through the results.
     #
-    # @return [
+    # @return [String] JSON response
     def get_everything(
       q: nil,
       search_in: nil,
@@ -86,8 +86,7 @@ module Langchain::Tool
       params[:pageSize] = page_size if page_size
       params[:page] = page if page
 
-
-      tool_response(content: response)
+      send_request(path: "everything", params: params)
     end
 
     # Retrieve top headlines
@@ -99,7 +98,7 @@ module Langchain::Tool
     # @param page_size [Integer] The number of results to return per page. 20 is the API's default, 100 is the maximum. Our default is 5.
     # @param page [Integer] Use this to page through the results.
     #
-    # @return [
+    # @return [String] JSON response
     def get_top_headlines(
       country: nil,
       category: nil,
@@ -118,8 +117,7 @@ module Langchain::Tool
       params[:pageSize] = page_size if page_size
       params[:page] = page if page
 
-
-      tool_response(content: response)
+      send_request(path: "top-headlines", params: params)
     end
 
     # Retrieve news sources
@@ -128,7 +126,7 @@ module Langchain::Tool
     # @param language [String] The 2-letter ISO-639-1 code of the language you want to get headlines for. Possible options: ar, de, en, es, fr, he, it, nl, no, pt, ru, se, ud, zh.
     # @param country [String] The 2-letter ISO 3166-1 code of the country you want to get headlines for. Possible options: ae, ar, at, au, be, bg, br, ca, ch, cn, co, cu, cz, de, eg, fr, gb, gr, hk, hu, id, ie, il, in, it, jp, kr, lt, lv, ma, mx, my, ng, nl, no, nz, ph, pl, pt, ro, rs, ru, sa, se, sg, si, sk, th, tr, tw, ua, us, ve, za.
     #
-    # @return [
+    # @return [String] JSON response
     def get_sources(
       category: nil,
       language: nil,
@@ -141,8 +139,7 @@ module Langchain::Tool
       params[:category] = category if category
       params[:language] = language if language
 
-
-      tool_response(content: response)
+      send_request(path: "top-headlines/sources", params: params)
     end
 
     private
data/lib/langchain/tool/ruby_code_interpreter.rb
CHANGED
@@ -5,7 +5,7 @@ module Langchain::Tool
   # A tool that execute Ruby code in a sandboxed environment.
   #
   # Gem requirements:
-  #     gem "safe_ruby", "~> 1.0.
+  #     gem "safe_ruby", "~> 1.0.4"
   #
   # Usage:
   #    interpreter = Langchain::Tool::RubyCodeInterpreter.new
@@ -27,11 +27,11 @@ module Langchain::Tool
     # Executes Ruby code in a sandboxes environment.
     #
     # @param input [String] ruby code expression
-    # @return [
+    # @return [String] Answer
     def execute(input:)
       Langchain.logger.debug("#{self.class} - Executing \"#{input}\"")
 
-
+      safe_eval(input)
     end
 
     def safe_eval(code)
data/lib/langchain/tool/tavily.rb
CHANGED
@@ -41,7 +41,7 @@ module Langchain::Tool
     # @param include_domains [Array<String>] A list of domains to specifically include in the search results. Default is None, which includes all domains.
     # @param exclude_domains [Array<String>] A list of domains to specifically exclude from the search results. Default is None, which doesn't exclude any domains.
     #
-    # @return [
+    # @return [String] The search results in JSON format.
     def search(
       query:,
       search_depth: "basic",
@@ -70,7 +70,7 @@ module Langchain::Tool
       response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == "https") do |http|
         http.request(request)
       end
-
+      response.body
     end
   end
 end
data/lib/langchain/tool/vectorsearch.rb
CHANGED
@@ -33,10 +33,8 @@ module Langchain::Tool
     #
     # @param query [String] The query to search for
     # @param k [Integer] The number of results to return
-    # @return [Langchain::Tool::Response] The response from the server
     def similarity_search(query:, k: 4)
-
-      tool_response(content: result)
+      vectorsearch.similarity_search(query:, k: 4)
     end
   end
 end
data/lib/langchain/tool/weather.rb
CHANGED
@@ -55,15 +55,15 @@ module Langchain::Tool
       params = {appid: @api_key, q: [city, state_code, country_code].compact.join(","), units: units}
 
       location_response = send_request(path: "geo/1.0/direct", params: params.except(:units))
-      return
+      return location_response if location_response.is_a?(String) # Error occurred
 
       location = location_response.first
-      return
+      return "Location not found" unless location
 
       params = params.merge(lat: location["lat"], lon: location["lon"]).except(:q)
       weather_data = send_request(path: "data/2.5/weather", params: params)
 
-
+      parse_weather_response(weather_data, units)
     end
 
     def send_request(path:, params:)
data/lib/langchain/tool/wikipedia.rb
CHANGED
@@ -27,13 +27,13 @@ module Langchain::Tool
     # Executes Wikipedia API search and returns the answer
     #
     # @param input [String] search query
-    # @return [
+    # @return [String] Answer
     def execute(input:)
       Langchain.logger.debug("#{self.class} - Executing \"#{input}\"")
 
       page = ::Wikipedia.find(input)
       # It would be nice to figure out a way to provide page.content but the LLM token limit is an issue
-
+      page.summary
     end
   end
 end
data/lib/langchain/tool_definition.rb
CHANGED
@@ -61,20 +61,6 @@ module Langchain::ToolDefinition
       .downcase
   end
 
-  def self.extended(base)
-    base.include(InstanceMethods)
-  end
-
-  module InstanceMethods
-    # Create a tool response
-    # @param content [String, nil] The content of the tool response
-    # @param image_url [String, nil] The URL of an image
-    # @return [Langchain::ToolResponse] The tool response
-    def tool_response(content: nil, image_url: nil)
-      Langchain::ToolResponse.new(content: content, image_url: image_url)
-    end
-  end
-
   # Manages schemas for functions
   class FunctionSchemas
     def initialize(tool_name)
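Illustrative note: the block removed above is what let 0.19.5 tools build `Langchain::ToolResponse` objects (see the deleted `lib/langchain/tool_response.rb` at the end of this diff). A hedged sketch of the 0.19.5 convention, reusing the `MovieInfoTool` example from the README hunk; the `define_function`/`property` schema DSL shown here is assumed, not taken from this diff:

    class MovieInfoTool
      extend Langchain::ToolDefinition

      define_function :search_movie, description: "Search for a movie by title" do
        property :query, type: "string", required: true
      end

      def search_movie(query:)
        # 0.19.5 style: wrap output so the Assistant can carry text and/or an image URL
        tool_response(content: "Results for #{query}")
      end
    end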
data/lib/langchain/vectorsearch/base.rb
CHANGED
@@ -7,6 +7,7 @@ module Langchain::Vectorsearch
   # == Available vector databases
   #
   # - {Langchain::Vectorsearch::Chroma}
+  # - {Langchain::Vectorsearch::Epsilla}
   # - {Langchain::Vectorsearch::Elasticsearch}
   # - {Langchain::Vectorsearch::Hnswlib}
   # - {Langchain::Vectorsearch::Milvus}
@@ -29,6 +30,7 @@ module Langchain::Vectorsearch
   # )
   #
   # # You can instantiate other supported vector databases the same way:
+  # epsilla = Langchain::Vectorsearch::Epsilla.new(...)
   # milvus = Langchain::Vectorsearch::Milvus.new(...)
   # qdrant = Langchain::Vectorsearch::Qdrant.new(...)
   # pinecone = Langchain::Vectorsearch::Pinecone.new(...)
data/lib/langchain/vectorsearch/chroma.rb
CHANGED
@@ -116,8 +116,7 @@ module Langchain::Vectorsearch
     count = collection.count
     n_results = [count, k].min
 
-
-    collection.query(query_embeddings: [embedding], results: n_results, where: nil, where_document: nil)
+    collection.query(query_embeddings: [embedding], results: n_results)
   end
 
   # Ask a question and return the answer
data/lib/langchain/vectorsearch/elasticsearch.rb
CHANGED
@@ -144,7 +144,7 @@ module Langchain::Vectorsearch
     # @yield [String] Stream responses back one String at a time
     # @return [String] The answer to the question
     def ask(question:, k: 4, &block)
-      search_results = similarity_search(
+      search_results = similarity_search(query: question, k: k)
 
       context = search_results.map do |result|
         result[:input]
data/lib/langchain/vectorsearch/epsilla.rb
CHANGED
@@ -21,8 +21,6 @@ module Langchain::Vectorsearch
   # @param index_name [String] The name of the Epsilla table to use
   # @param llm [Object] The LLM client to use
   def initialize(url:, db_name:, db_path:, index_name:, llm:)
-    Langchain.logger.warn "DEPRECATED: `Langchain::Vectorsearch::Epsilla` is deprecated, and will be removed in the next major version. Please use other vector storage engines."
-
     depends_on "epsilla-ruby", req: "epsilla"
 
     uri = URI.parse(url)
data/lib/langchain/vectorsearch/pgvector.rb
CHANGED
@@ -6,7 +6,7 @@ module Langchain::Vectorsearch
   # The PostgreSQL vector search adapter
   #
   # Gem requirements:
-  #     gem "sequel", "~> 5.
+  #     gem "sequel", "~> 5.68.0"
   #     gem "pgvector", "~> 0.2"
   #
   # Usage:
@@ -51,30 +51,17 @@ module Langchain::Vectorsearch
     # Upsert a list of texts to the index
     # @param texts [Array<String>] The texts to add to the index
     # @param ids [Array<Integer>] The ids of the objects to add to the index, in the same order as the texts
-    # @param metadata [Array<Hash>] The metadata to associate with each text, in the same order as the texts
     # @return [PG::Result] The response from the database including the ids of
     #   the added or updated texts.
-    def upsert_texts(texts:, ids
-
-
-      data = texts.zip(ids, metadata).flat_map do |text, id, meta|
-        {
-          id: id,
-          content: text,
-          vectors: llm.embed(text: text).embedding.to_s,
-          namespace: namespace,
-          metadata: meta.to_json
-        }
+    def upsert_texts(texts:, ids:)
+      data = texts.zip(ids).flat_map do |(text, id)|
+        {id: id, content: text, vectors: llm.embed(text: text).embedding.to_s, namespace: namespace}
       end
       # @db[table_name.to_sym].multi_insert(data, return: :primary_key)
       @db[table_name.to_sym]
         .insert_conflict(
           target: :id,
-          update: {
-            content: Sequel[:excluded][:content],
-            vectors: Sequel[:excluded][:vectors],
-            metadata: Sequel[:excluded][:metadata]
-          }
+          update: {content: Sequel[:excluded][:content], vectors: Sequel[:excluded][:vectors]}
         )
         .multi_insert(data, return: :primary_key)
     end
@@ -82,34 +69,25 @@ module Langchain::Vectorsearch
     # Add a list of texts to the index
     # @param texts [Array<String>] The texts to add to the index
     # @param ids [Array<String>] The ids to add to the index, in the same order as the texts
-    # @param metadata [Array<Hash>] The metadata to associate with each text, in the same order as the texts
     # @return [Array<Integer>] The the ids of the added texts.
-    def add_texts(texts:, ids: nil
-      metadata = Array.new(texts.size, {}) if metadata.nil?
-
+    def add_texts(texts:, ids: nil)
      if ids.nil? || ids.empty?
-        data = texts.
-          {
-            content: text,
-            vectors: llm.embed(text: text).embedding.to_s,
-            namespace: namespace,
-            metadata: meta.to_json
-          }
+        data = texts.map do |text|
+          {content: text, vectors: llm.embed(text: text).embedding.to_s, namespace: namespace}
        end
 
        @db[table_name.to_sym].multi_insert(data, return: :primary_key)
      else
-        upsert_texts(texts: texts, ids: ids
+        upsert_texts(texts: texts, ids: ids)
      end
    end
 
     # Update a list of ids and corresponding texts to the index
     # @param texts [Array<String>] The texts to add to the index
     # @param ids [Array<String>] The ids to add to the index, in the same order as the texts
-    # @param metadata [Array<Hash>] The metadata to associate with each text, in the same order as the texts
     # @return [Array<Integer>] The ids of the updated texts.
-    def update_texts(texts:, ids
-      upsert_texts(texts: texts, ids: ids
+    def update_texts(texts:, ids:)
+      upsert_texts(texts: texts, ids: ids)
     end
 
     # Remove a list of texts from the index
@@ -129,7 +107,6 @@ module Langchain::Vectorsearch
         text :content
         column :vectors, "vector(#{vector_dimensions})"
         text namespace_column.to_sym, default: nil
-        jsonb :metadata, default: "{}"
       end
     end
 
@@ -159,7 +136,6 @@ module Langchain::Vectorsearch
     def similarity_search_by_vector(embedding:, k: 4)
       db.transaction do # BEGIN
         documents_model
-          .select(:content, :metadata)
           .nearest_neighbors(:vectors, embedding, distance: operator).limit(k)
           .where(namespace_column.to_sym => namespace)
       end
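Illustrative note: the net effect of the pgvector hunks is that `add_texts`, `update_texts`, and `upsert_texts` lose their `metadata:` keyword and the `metadata` jsonb column. A hypothetical before/after sketch, assuming the adapter's usual `url:`/`index_name:`/`llm:` constructor keywords:

    pgvector = Langchain::Vectorsearch::Pgvector.new(url: ENV["POSTGRES_URL"], index_name: "documents", llm: llm)

    # 0.19.5 (left side of the hunk):
    # pgvector.add_texts(texts: ["Hello world"], ids: [1], metadata: [{source: "readme"}])

    # 1.19.2 (right side of the hunk):
    pgvector.add_texts(texts: ["Hello world"], ids: [1])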
data/lib/langchain/version.rb
CHANGED
data/lib/langchain.rb
CHANGED
@@ -29,6 +29,10 @@ loader.inflector.inflect(
 
 loader.collapse("#{__dir__}/langchain/llm/response")
 
+# RubyCodeInterpreter does not work with Ruby 3.3;
+# https://github.com/ukutaht/safe_ruby/issues/4
+loader.ignore("#{__dir__}/langchain/tool/ruby_code_interpreter") if RUBY_VERSION >= "3.3.0"
+
 loader.setup
 
 # Langchain.rb a is library for building LLM-backed Ruby applications. It is an abstraction layer that sits on top of the emerging AI-related tools that makes it easy for developers to consume and string those services together.
metadata
CHANGED
@@ -1,13 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: langchainrb
 version: !ruby/object:Gem::Version
-  version:
+  version: 1.19.2
 platform: ruby
 authors:
 - Andrei Bondarev
+autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2024-11-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: baran
@@ -23,40 +24,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.1.9
-- !ruby/object:Gem::Dependency
-  name: csv
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 - !ruby/object:Gem::Dependency
   name: json-schema
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '4'
-    - - "<"
-      - !ruby/object:Gem::Version
-        version: '6'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
      - !ruby/object:Gem::Version
        version: '4'
-    - - "<"
-      - !ruby/object:Gem::Version
-        version: '6'
 - !ruby/object:Gem::Dependency
   name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
@@ -105,28 +86,28 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 2.7.6
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
+        version: 2.7.6
 - !ruby/object:Gem::Dependency
   name: pry-byebug
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 3.
+        version: 3.10.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 3.
+        version: 3.10.0
 - !ruby/object:Gem::Dependency
   name: yard
   requirement: !ruby/object:Gem::Requirement
@@ -198,19 +179,19 @@ dependencies:
       - !ruby/object:Gem::Version
         version: 0.2.1
 - !ruby/object:Gem::Dependency
-  name:
+  name: anthropic
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.3'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.3'
 - !ruby/object:Gem::Dependency
   name: aws-sdk-bedrockruntime
   requirement: !ruby/object:Gem::Requirement
@@ -281,6 +262,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 8.2.0
+- !ruby/object:Gem::Dependency
+  name: epsilla-ruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.4
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.4
 - !ruby/object:Gem::Dependency
   name: eqn
   requirement: !ruby/object:Gem::Requirement
@@ -379,6 +374,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.10.3
+- !ruby/object:Gem::Dependency
+  name: llama_cpp
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.9.4
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.9.4
 - !ruby/object:Gem::Dependency
   name: nokogiri
   requirement: !ruby/object:Gem::Requirement
@@ -553,28 +562,28 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.4
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.4
 - !ruby/object:Gem::Dependency
   name: sequel
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 5.
+        version: 5.68.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 5.
+        version: 5.68.0
 - !ruby/object:Gem::Dependency
   name: weaviate-ruby
   requirement: !ruby/object:Gem::Requirement
@@ -695,7 +704,6 @@ files:
 - lib/langchain/loader.rb
 - lib/langchain/output_parsers/base.rb
 - lib/langchain/output_parsers/output_fixing_parser.rb
-- lib/langchain/output_parsers/output_parser_exception.rb
 - lib/langchain/output_parsers/prompts/naive_fix_prompt.yaml
 - lib/langchain/output_parsers/structured_output_parser.rb
 - lib/langchain/processors/base.rb
@@ -727,7 +735,6 @@ files:
 - lib/langchain/tool/weather.rb
 - lib/langchain/tool/wikipedia.rb
 - lib/langchain/tool_definition.rb
-- lib/langchain/tool_response.rb
 - lib/langchain/utils/cosine_similarity.rb
 - lib/langchain/utils/hash_transformer.rb
 - lib/langchain/utils/image_wrapper.rb
@@ -754,6 +761,7 @@ metadata:
   source_code_uri: https://github.com/patterns-ai-core/langchainrb
   changelog_uri: https://github.com/patterns-ai-core/langchainrb/blob/main/CHANGELOG.md
   documentation_uri: https://rubydoc.info/gems/langchainrb
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -768,7 +776,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.20
+signing_key:
 specification_version: 4
 summary: Build LLM-backed Ruby applications with Ruby's Langchain.rb
 test_files: []
data/lib/langchain/tool_response.rb
DELETED
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-module Langchain
-  # ToolResponse represents the standardized output of a tool.
-  # It can contain either text content or an image URL.
-  class ToolResponse
-    attr_reader :content, :image_url
-
-    # Initializes a new ToolResponse.
-    #
-    # @param content [String] The text content of the response.
-    # @param image_url [String, nil] Optional URL to an image.
-    def initialize(content: nil, image_url: nil)
-      raise ArgumentError, "Either content or image_url must be provided" if content.nil? && image_url.nil?
-
-      @content = content
-      @image_url = image_url
-    end
-
-    def to_s
-      content.to_s
-    end
-  end
-end