foobara-llm-backed-command 0.0.9 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/lib/foobara/llm_backed_command.rb +1 -1
- data/src/llm_backed_execute_method.rb +118 -65
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7747f5873ca5862ac16e10edc62f856fe9f41b480ca9537e2fac7d967b2647ba
+  data.tar.gz: 718b24e6f4e33bc993fd39fd043a17343ea7e7b8342b5cc1807d5b7f662f009b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7425c556694c8350956580d8492321b8d622c66245d0853911602229ecf894457f1bdb5c08934de8bc4a4e9bf06583983173a84f7dd563bd25f949cccfc41b19
+  data.tar.gz: 14847e3e1fc3de9d2f438b4a19690a018341e09d40f9ff617175ce8be2604f8abb99d6b860b8015f92136bc373b903d0c72e644e9080df1e4df3a19324d5ed58
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,11 @@
+## [1.0.0] - 2025-07-08
+
+- Add support for separate serializers for user and assistant messages
+
+## [0.0.10] - 2025-07-06
+
+- Handle bizarre deepseek-r1 stranded </think> tag
+
 ## [0.0.9] - 2025-06-28
 
 - Use temperature and replace Ask with GenerateNextMessage
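The 1.0.0 entry above introduces per-role serializers. As a rough illustration of how a command might opt in, here is a minimal sketch; the input names (`llm_model`, `association_depth`, `user_association_depth`, `assistant_association_depth`) come from the `LLM_INTEGRATION_KEYS` constant in the source diff below, while the command class, the `Widget` entity, and the chosen defaults are hypothetical and assume the foobara gems are loaded.

```ruby
# Hypothetical command sketch -- not taken from the gem itself.
class DescribeWidget < Foobara::Command
  include Foobara::LlmBackedExecuteMethod

  inputs do
    widget Widget, :required
    llm_model :symbol, one_of: Foobara::Ai::AnswerBot::Types::ModelEnum
    # Inputs serialized into the user message can keep nested records...
    user_association_depth :symbol, default: Foobara::AssociationDepth::AGGREGATE
    # ...while the result schema described to the model stays primary-key-only.
    assistant_association_depth :symbol, default: Foobara::AssociationDepth::PRIMARY_KEY_ONLY
  end

  result :string
end
```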
data/src/llm_backed_execute_method.rb
CHANGED
@@ -2,13 +2,20 @@
 # on the class.
 #
 # inputs do
-#   association_depth :symbol, one_of:
+#   association_depth :symbol, one_of: AssociationDepth, default: AssociationDepth::ATOM
 #   llm_model :symbol, one_of: Foobara::Ai::AnswerBot::Types::ModelEnum
 # end
 module Foobara
   module LlmBackedExecuteMethod
     include Concern
 
+    LLM_INTEGRATION_KEYS = [
+      :llm_model,
+      :association_depth,
+      :user_association_depth,
+      :assistant_association_depth
+    ].freeze
+
     on_include do
       depends_on Ai::AnswerBot::GenerateNextMessage
       possible_error :could_not_parse_result_json,
@@ -20,7 +27,11 @@ module Foobara
     end
 
     def execute
+      determine_user_association_depth
+      determine_assistant_association_depth
+      determine_user_serializer
+      determine_assistant_serializer
+      determine_llm_instructions
       construct_messages
       generate_answer
       parse_answer
@@ -28,32 +39,51 @@ module Foobara
       parsed_answer
     end
 
-    attr_accessor :
+    attr_accessor :answer, :parsed_answer, :messages, :assistant_serializer, :user_serializer,
+                  :computed_assistant_association_depth, :computed_user_association_depth,
+                  :llm_instructions
+
+    def determine_assistant_association_depth
+      self.computed_assistant_association_depth = if respond_to?(:assistant_association_depth)
+                                                    assistant_association_depth
+                                                  elsif respond_to?(:association_depth)
+                                                    association_depth
+                                                  else
+                                                    Foobara::AssociationDepth::PRIMARY_KEY_ONLY
+                                                  end
+    end
+
+    def determine_assistant_serializer
+      self.assistant_serializer = depth_to_serializer(computed_assistant_association_depth)
+    end
+
+    def determine_user_association_depth
+      self.computed_user_association_depth = if respond_to?(:user_association_depth)
+                                               user_association_depth
+                                             elsif respond_to?(:association_depth)
+                                               association_depth
+                                             else
+                                               Foobara::AssociationDepth::ATOM
+                                             end
+    end
+
+    def determine_user_serializer
+      self.user_serializer = depth_to_serializer(computed_user_association_depth)
+    end
+
+    def depth_to_serializer(depth)
+      case depth
+      when Foobara::AssociationDepth::ATOM
+        Foobara::CommandConnectors::Serializers::AtomicSerializer
+      when Foobara::AssociationDepth::AGGREGATE
+        Foobara::CommandConnectors::Serializers::AggregateSerializer
+      when Foobara::AssociationDepth::PRIMARY_KEY_ONLY
+        Foobara::CommandConnectors::Serializers::EntitiesToPrimaryKeysSerializer
+      else
+        # :nocov:
+        raise "Unknown depth: #{depth}"
+        # :nocov:
+      end.instance
     end
 
     def generate_answer
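In plain terms, the hunk above resolves each role's depth by preferring a role-specific input, then the shared `association_depth` input, then a role-specific default (ATOM for the user message, PRIMARY_KEY_ONLY for the assistant). A stdlib-only sketch of that fallback order, with the framework's `respond_to?` checks replaced by a hash lookup for illustration:

```ruby
# Plain-Ruby illustration of the fallback order above; not the gem's API.
def effective_depth(role, inputs)
  default = role == :user ? :atom : :primary_key_only
  inputs[:"#{role}_association_depth"] || inputs[:association_depth] || default
end

effective_depth(:user, { association_depth: :aggregate })           # => :aggregate
effective_depth(:assistant, { assistant_association_depth: :atom }) # => :atom
effective_depth(:assistant, {})                                     # => :primary_key_only
```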
@@ -61,6 +91,7 @@ module Foobara
         chat: Ai::AnswerBot::Types::Chat.new(messages:)
       }
 
+      # NOTE: some models don't allow 0 such as o1. Manually set to 1 in calling code for such models for now.
       inputs[:temperature] = if respond_to?(:temperature)
                                temperature
                              end || 0
@@ -81,12 +112,25 @@ module Foobara
         if content.is_a?(String)
           message
         else
+          serializer = if message[:role] == :user
+                         user_serializer
+                       else
+                         assistant_serializer
+                       end
+
           content = serializer.serialize(content)
           message.merge(content: JSON.fast_generate(content))
         end
       end
     end
 
+    def determine_llm_instructions
+      self.llm_instructions = self.class.llm_instructions(
+        computed_user_association_depth,
+        computed_assistant_association_depth
+      )
+    end
+
     def build_messages
       [
         {
@@ -95,17 +139,16 @@ module Foobara
         },
         {
           role: :user,
-          content: inputs.except(
+          content: inputs.except(*LLM_INTEGRATION_KEYS)
         }
       ]
     end
 
-    def llm_instructions
-      self.class.llm_instructions
-    end
-
     def parse_answer
       stripped_answer = answer.gsub(/<THINK>.*?<\/THINK>/mi, "")
+      # For some reason sometimes deepseek-r1:32b starts the answer with "\n\n</think>\n\n"
+      # so removing it as a special case
+      stripped_answer = stripped_answer.gsub(/\A\s*<\/?think>\s*/mi, "")
       fencepostless_answer = stripped_answer.gsub(/^\s*```\w*\n(.*)```\s*\z/m, "\\1")
 
       # TODO: should we verify against json-schema or no?
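The cleanup step added to `parse_answer` above can be exercised with the standard library alone; this snippet assumes a deepseek-r1-style answer that begins with a stranded `</think>` tag:

```ruby
require "json"

answer = "\n\n</think>\n\n{\"ok\": true}"

stripped = answer.gsub(/<THINK>.*?<\/THINK>/mi, "")   # removes full <think>...</think> blocks (none here)
stripped = stripped.gsub(/\A\s*<\/?think>\s*/mi, "")  # drops the stranded leading </think>

JSON.parse(stripped) # => {"ok"=>true}
```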
@@ -139,71 +182,81 @@ module Foobara
     end
 
     module ClassMethods
-      def inputs_json_schema
+      def inputs_json_schema(association_depth)
+        JsonSchemaGenerator.to_json_schema(
+          inputs_type_without_llm_integration_inputs,
+          association_depth:
+        )
       end
 
       def inputs_type_without_llm_integration_inputs
-        return @inputs_type_without_llm_integration_inputs if @inputs_type_without_llm_integration_inputs
         type_declaration = Util.deep_dup(inputs_type.declaration_data)
 
         element_type_declarations = type_declaration[:element_type_declarations]
 
         changed = false
 
-        if element_type_declarations.key?(:association_depth)
-          changed = true
-          element_type_declarations.delete(:association_depth)
+        LLM_INTEGRATION_KEYS.each do |key|
+          if element_type_declarations.key?(key)
+            changed = true
+            element_type_declarations.delete(key)
+          end
         end
 
         if type_declaration.key?(:defaults)
+          LLM_INTEGRATION_KEYS.each do |key|
+            if type_declaration[:defaults].key?(key)
+              changed = true
+              type_declaration[:defaults].delete(key)
+            end
          end
 
-          if type_declaration[:defaults].key?(:association_depth)
-            changed = true
-            type_declaration[:defaults].delete(:association_depth)
-          end
          if type_declaration[:defaults].empty?
            type_declaration.delete(:defaults)
          end
        end
 
+        if changed
+          domain.foobara_type_from_declaration(type_declaration)
+        else
+          inputs_type
+        end
      end
 
-      def result_json_schema
+      def result_json_schema(association_depth)
+        JsonSchemaGenerator.to_json_schema(
          result_type,
-          association_depth:
+          association_depth:
        )
      end
 
-      def llm_instructions
+      def llm_instructions(user_association_depth, assistant_association_depth)
+        key = [user_association_depth, assistant_association_depth]
+
+        @llm_instructions_cache ||= {}
+
+        if @llm_instructions_cache.key?(key)
+          # :nocov:
+          @llm_instructions_cache[key]
+          # :nocov:
+        else
+          @llm_instructions_cache[key] = build_llm_instructions(user_association_depth, assistant_association_depth)
+        end
+      end
+
+      def build_llm_instructions(user_association_depth, assistant_association_depth)
+        <<~INSTRUCTIONS
          You are implementing an API for a command named #{scoped_full_name} which has the following description:
 
-          #{description}
+          #{description}
 
          Here is the inputs JSON schema for the data you will receive:
 
-          #{inputs_json_schema}
+          #{inputs_json_schema(user_association_depth)}
 
          Here is the result JSON schema:
 
-          #{result_json_schema}
+          #{result_json_schema(assistant_association_depth)}
 
          You will receive 1 message containing only JSON data according to the inputs JSON schema above
          and you will generate a JSON response that is a valid response according to the result JSON schema above.
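With the class methods above, the instruction text now depends on the two depths and is memoized per (user, assistant) pair. A hedged sketch of calling it directly, assuming a command class like the hypothetical `DescribeWidget` sketched earlier; in normal use `#execute` reaches it through `determine_llm_instructions`:

```ruby
# Hypothetical direct call; the depth pair selects which JSON schemas get embedded.
instructions = DescribeWidget.llm_instructions(
  Foobara::AssociationDepth::AGGREGATE,       # depth used for the inputs JSON schema
  Foobara::AssociationDepth::PRIMARY_KEY_ONLY # depth used for the result JSON schema
)

puts instructions
```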
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: foobara-llm-backed-command
 version: !ruby/object:Gem::Version
-  version: 0.0
+  version: 1.0.0
 platform: ruby
 authors:
 - Miles Georgi
@@ -43,14 +43,14 @@ dependencies:
   requirements:
   - - "~>"
     - !ruby/object:Gem::Version
-      version: 0.0
+      version: 1.0.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-      version: 0.0
+      version: 1.0.0
 email:
 - azimux@gmail.com
 executables: []