ruby_llm-agents 1.3.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/ruby_llm/agents/base_agent.rb +5 -3
- data/lib/ruby_llm/agents/core/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7e22cdb85d5ac5b3d3f0236e212233464883a3ec7d2831b6651436e0e026578c
+  data.tar.gz: e3a8fd1f55bf159ea69444f01c457cded6f2cb4f04a36796d6d6ee1fdbe078a6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 526caa1eef7d31953417c0b6eb956657c0c05d41c2720472311ddc66afc337ec1f903e0e0e9ed550b3fe71690be4e4b8da77c2c36c08586a8000e5f971930789
+  data.tar.gz: be0e49c7f4fb56f49b94e88c67ea6a8d8baf1cd4e3c4fadca999aa063728648a2c713fda4c202053167d4676ddb405b37d0b8afd51e83c29bdb4c93fa3f9cec0
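These values are the digests of the two archives packed inside the .gem file itself. As an illustrative sketch only (the filename below is an assumption, e.g. after `gem fetch ruby_llm-agents`), the SHA256 entries can be recomputed like this:

# Sketch: recompute the SHA256 entries of checksums.yaml from a downloaded gem.
# A .gem file is a tar archive whose entries include metadata.gz and data.tar.gz.
require "digest"
require "rubygems/package"

File.open("ruby_llm-agents-1.3.1.gem", "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
      puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
    end
  end
end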
data/lib/ruby_llm/agents/base_agent.rb
CHANGED

@@ -471,7 +471,7 @@ module RubyLLM
     # @param context [Pipeline::Context] The execution context
     # @return [void] Sets context.output with the result
     def execute(context)
-      client = build_client
+      client = build_client(context)
       response = execute_llm_call(client, context)
       capture_response(response, context)
       result = build_result(process_response(response), response, context)
@@ -480,10 +480,12 @@ module RubyLLM

     # Builds and configures the RubyLLM client
     #
+    # @param context [Pipeline::Context, nil] Optional execution context for model overrides
     # @return [RubyLLM::Chat] Configured chat client
-    def build_client
+    def build_client(context = nil)
+      effective_model = context&.model || model
       client = RubyLLM.chat
-        .with_model(model)
+        .with_model(effective_model)
         .with_temperature(temperature)

       client = client.with_instructions(system_prompt) if system_prompt
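The functional change in 1.3.1 is that `build_client` now accepts the pipeline context and prefers a per-execution model over the agent's configured default. A minimal sketch of that resolution logic, using a stand-in `Context` struct since the diff does not show `Pipeline::Context` itself (the struct and method names below are illustrative, not part of the gem):

# Sketch of the new model-resolution behavior in build_client:
# a context-supplied model wins; otherwise the agent's default is used.
Context = Struct.new(:model, :output, keyword_init: true)

def effective_model(context, default_model)
  context&.model || default_model
end

puts effective_model(Context.new(model: "gpt-4o-mini"), "gpt-4o") # => "gpt-4o-mini"
puts effective_model(Context.new(model: nil), "gpt-4o")           # => "gpt-4o"
puts effective_model(nil, "gpt-4o")                               # => "gpt-4o"

In practice this means a pipeline step can run against a different model by setting a model on its context, without touching the agent's own configuration; passing no context (or one without a model) preserves the 1.3.0 behavior.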