foobara-llm-backed-command 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/src/llm_backed_execute_method.rb +39 -15
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dd38cd90e786b6c0da9b5050bc524db798c11b5621201eff0510b5264d00c240
+  data.tar.gz: a1529aaac899adda0831d947e982affc9420bbdcfbce53d12677fd09a6fcab4a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 38df4ecf874a19a014329d5ecf3787f33fc0b70bc73c727de70220bda27312187bc43e1d1752177a58076c17fbb0f4003375f988f0b39a32117fd5bc91825344
+  data.tar.gz: 1c1ff92542023066c4ebabd4da9b417bfb715494f8ecf12a988072e84b41ddd9f300484eb9302ee47bf27237e320f4dc88a186ddf0a8fbb8e741d99aaaab08ae
data/CHANGELOG.md
CHANGED
data/src/llm_backed_execute_method.rb
CHANGED
@@ -10,7 +10,7 @@ module Foobara
     include Concern
 
     on_include do
-      depends_on Ai::AnswerBot::
+      depends_on Ai::AnswerBot::GenerateNextMessage
       possible_error :could_not_parse_result_json,
                      message: "Could not parse answer",
                      context: {
@@ -21,14 +21,14 @@ module Foobara
 
     def execute
       determine_serializer
-
+      construct_messages
       generate_answer
       parse_answer
 
       parsed_answer
     end
 
-    attr_accessor :serializer, :
+    attr_accessor :serializer, :answer, :parsed_answer, :messages
 
     def determine_serializer
       depth = if respond_to?(:association_depth)
@@ -56,24 +56,48 @@ module Foobara
       self.serializer = serializer.new
     end
 
-    def construct_input_json
-      inputs_without_llm_integration_inputs = inputs.except(:llm_model, :association_depth)
-      input_json = serializer.serialize(inputs_without_llm_integration_inputs)
-
-      self.input_json = JSON.fast_generate(input_json)
-    end
-
     def generate_answer
-
-
-        question: input_json
+      inputs = {
+        chat: Ai::AnswerBot::Types::Chat.new(messages:)
       }
 
+      inputs[:temperature] = if respond_to?(:temperature)
+                               temperature
+                             end || 0
+
       if respond_to?(:llm_model)
-
+        inputs[:model] = llm_model
+      end
+
+      message = run_subcommand!(Ai::AnswerBot::GenerateNextMessage, inputs)
+
+      self.answer = message.content
+    end
+
+    def construct_messages
+      self.messages = build_messages.map do |message|
+        content = message[:content]
+
+        if content.is_a?(String)
+          message
+        else
+          content = serializer.serialize(content)
+          message.merge(content: JSON.fast_generate(content))
+        end
       end
+    end
 
-
+    def build_messages
+      [
+        {
+          role: :system,
+          content: llm_instructions
+        },
+        {
+          role: :user,
+          content: inputs.except(:llm_model, :association_depth)
+        }
+      ]
     end
 
     def llm_instructions