foobara-llm-backed-command 0.0.8 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1a99e4dfeadb80533aa7e5206636a4c5950600730d9851b958c8d7fee30fa6b2
-  data.tar.gz: c552bc618a58e71b7613b9d405273843fc92f949f97c35cae356eb2892e90fc4
+  metadata.gz: ad9f2ebc5f7f9277ed85e0f8cc410470a8ee00d9b6d88c8afb8dabab1bd604fb
+  data.tar.gz: 1076fec92769e65d72a4674e77989520f562e0c5c6f432afe4a11cab5e38a853
 SHA512:
-  metadata.gz: 37440535a4b5a690efdb7f130e8a88f87e31ee78899316c7a0d1f35410d5f6f50059a853a21e1fccebd9c834b1ac34068bf4f68a4fe85fe89cf1ad055f502d57
-  data.tar.gz: 9b09abf6b2ed06f6fb274aac373f4df0a701d8ba5ea6d4257860b93e1bb48f3a673324a808ee354988c5fa7ae13603cfe3efd3d095c5f5d82460ecf84d5ccf20
+  metadata.gz: 03e6f1c662e353803a097cc675e8f71d8911d02036fa668a66f64dbd6940b7c48125d538dd975ae80ff8348a9f43cfdecd602bb0a4393fd8c0d27f9871c31e1c
+  data.tar.gz: 99f9120528785f7260d896967c4d9737e3ebdd75967aec9432e20ce9a3dbed5f85a1d6ebd1a254a23e370562e4ab4f44825f8b662ce3b7732abb0068d2c6e706
data/CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
+## [0.0.10] - 2025-07-06
+
+- Handle bizarre deepseek-r1 stranded </think> tag
+
+## [0.0.9] - 2025-06-28
+
+- Use temperature and replace Ask with GenerateNextMessage
+
 ## [0.0.8] - 2025-06-19
 
 - Convert result type entities to their primary key types with a description
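Together, the 0.0.9 and 0.0.10 entries cover the two behavioral changes in this diff: answers are now produced by `Ai::AnswerBot::GenerateNextMessage` from an explicit chat with a temperature, and `parse_answer` tolerates a stranded `</think>` tag. Below is a minimal sketch of what the new knobs look like from a consuming command's side; the command name and input are hypothetical and the concern's constant name is assumed, but the `respond_to?(:temperature)`/`respond_to?(:llm_model)` probing comes straight from this diff.

```ruby
# Hypothetical command using the concern shipped in this gem.
# Both knobs are optional: the concern probes with respond_to?,
# and temperature falls back to 0 when the method is absent.
class DetermineLanguage < Foobara::Command
  include Foobara::LlmBackedExecuteMethod # assumed constant name

  inputs code_snippet: :string,
         llm_model: :string # forwarded as model: when present

  result :string

  def temperature
    0.2 # omit this method to get the diff's default of 0
  end
end
```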
@@ -13,4 +13,4 @@ end
 
 Foobara::Util.require_directory "#{__dir__}/../../src"
 
-Foobara::Monorepo.project "llm_backed_command", project_path: "#{__dir__}/../../"
+Foobara.project "llm_backed_command", project_path: "#{__dir__}/../../"
@@ -10,7 +10,7 @@ module Foobara
     include Concern
 
     on_include do
-      depends_on Ai::AnswerBot::Ask
+      depends_on Ai::AnswerBot::GenerateNextMessage
       possible_error :could_not_parse_result_json,
                      message: "Could not parse answer",
                      context: {
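This dependency swap pairs with a call-site swap further down in the same file: in foobara, `depends_on` declares which subcommands a command may run and `run_subcommand!` performs the call, so both must name the same class. A sketch of the pairing as this diff relies on it:

```ruby
# Declaration and invocation must agree on the subcommand class:
on_include do
  depends_on Ai::AnswerBot::GenerateNextMessage
end

def generate_answer
  # the bang variant halts the outer command if the subcommand fails,
  # rather than returning an outcome to inspect
  message = run_subcommand!(Ai::AnswerBot::GenerateNextMessage, inputs)
  self.answer = message.content
end
```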
@@ -21,14 +21,14 @@ module Foobara
 
     def execute
       determine_serializer
-      construct_input_json
+      construct_messages
       generate_answer
       parse_answer
 
       parsed_answer
     end
 
-    attr_accessor :serializer, :input_json, :answer, :parsed_answer
+    attr_accessor :serializer, :answer, :parsed_answer, :messages
 
     def determine_serializer
       depth = if respond_to?(:association_depth)
@@ -56,24 +56,48 @@ module Foobara
       self.serializer = serializer.new
     end
 
-    def construct_input_json
-      inputs_without_llm_integration_inputs = inputs.except(:llm_model, :association_depth)
-      input_json = serializer.serialize(inputs_without_llm_integration_inputs)
-
-      self.input_json = JSON.fast_generate(input_json)
-    end
-
     def generate_answer
-      ask_inputs = {
-        instructions: llm_instructions,
-        question: input_json
+      inputs = {
+        chat: Ai::AnswerBot::Types::Chat.new(messages:)
       }
 
+      inputs[:temperature] = if respond_to?(:temperature)
+                               temperature
+                             end || 0
+
       if respond_to?(:llm_model)
-        ask_inputs[:model] = llm_model
+        inputs[:model] = llm_model
+      end
+
+      message = run_subcommand!(Ai::AnswerBot::GenerateNextMessage, inputs)
+
+      self.answer = message.content
+    end
+
+    def construct_messages
+      self.messages = build_messages.map do |message|
+        content = message[:content]
+
+        if content.is_a?(String)
+          message
+        else
+          content = serializer.serialize(content)
+          message.merge(content: JSON.fast_generate(content))
+        end
       end
+    end
 
-      self.answer = run_subcommand!(Ai::AnswerBot::Ask, ask_inputs)
+    def build_messages
+      [
+        {
+          role: :system,
+          content: llm_instructions
+        },
+        {
+          role: :user,
+          content: inputs.except(:llm_model, :association_depth)
+        }
+      ]
     end
 
     def llm_instructions
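The net effect of this rewrite: instead of serializing all domain inputs into a single `question` string for `Ask`, the concern now builds a two-message chat and asks for the next message. A sketch of the intermediate data for a command whose only domain input is a hypothetical `code_snippet`:

```ruby
# What build_messages returns, before construct_messages serializes it:
[
  { role: :system, content: llm_instructions },        # already a String, passed through untouched
  { role: :user, content: { code_snippet: "puts 1" } } # inputs minus :llm_model/:association_depth
]

# construct_messages JSON-encodes any non-String content, yielding e.g.
#   { role: :user, content: "{\"code_snippet\":\"puts 1\"}" }
# and generate_answer then wraps the result:
#   run_subcommand!(Ai::AnswerBot::GenerateNextMessage,
#                   chat: Ai::AnswerBot::Types::Chat.new(messages:),
#                   temperature: 0)
```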
@@ -82,6 +106,9 @@ module Foobara
 
     def parse_answer
       stripped_answer = answer.gsub(/<THINK>.*?<\/THINK>/mi, "")
+      # For some reason sometimes deepseek-r1:32b starts the answer with "\n\n</think>\n\n"
+      # so removing it as a special case
+      stripped_answer = stripped_answer.gsub(/\A\s*<\/think>\s*/mi, "")
       fencepostless_answer = stripped_answer.gsub(/^\s*```\w*\n(.*)```\s*\z/m, "\\1")
 
       # TODO: should we verify against json-schema or no?
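The new special case is easiest to see with a concrete answer string. A quick sketch using the same regexes as the diff (the sample answer is made up):

```ruby
# deepseek-r1:32b sometimes opens its answer with "\n\n</think>\n\n"
# even though no <think> tag was ever opened
answer = "\n\n</think>\n\n{\"language\": \"ruby\"}"

stripped = answer.gsub(/<THINK>.*?<\/THINK>/mi, "") # no paired tags here, so a no-op
stripped = stripped.gsub(/\A\s*<\/think>\s*/mi, "") # drops the stranded tag plus surrounding whitespace

stripped # => "{\"language\": \"ruby\"}"
```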
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: foobara-llm-backed-command
 version: !ruby/object:Gem::Version
-  version: 0.0.8
+  version: 0.0.10
 platform: ruby
 authors:
 - Miles Georgi