aia 0.9.8 → 0.9.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.version +1 -1
- data/CHANGELOG.md +15 -0
- data/README.md +62 -5
- data/Rakefile +16 -8
- data/examples/directives/ask.rb +21 -0
- data/examples/tools/edit_file.rb +2 -0
- data/examples/tools/incomplete/calculator_tool.rb +70 -0
- data/examples/tools/incomplete/composite_analysis_tool.rb +89 -0
- data/examples/tools/incomplete/data_science_kit.rb +128 -0
- data/examples/tools/incomplete/database_query_tool.rb +100 -0
- data/examples/tools/incomplete/devops_toolkit.rb +112 -0
- data/examples/tools/incomplete/error_handling_tool.rb +109 -0
- data/examples/tools/{pdf_page_reader.rb → incomplete/pdf_page_reader.rb} +2 -0
- data/examples/tools/incomplete/secure_tool_template.rb +117 -0
- data/examples/tools/incomplete/weather_tool.rb +110 -0
- data/examples/tools/incomplete/workflow_manager_tool.rb +145 -0
- data/examples/tools/list_files.rb +2 -0
- data/examples/tools/mcp/README.md +1 -0
- data/examples/tools/mcp/github_mcp_server.rb +41 -0
- data/examples/tools/mcp/imcp.rb +15 -0
- data/examples/tools/read_file.rb +2 -0
- data/examples/tools/run_shell_command.rb +2 -0
- data/justfile +3 -25
- data/lib/aia/chat_processor_service.rb +0 -3
- data/lib/aia/config.rb +542 -436
- data/lib/aia/context_manager.rb +3 -8
- data/lib/aia/directive_processor.rb +21 -10
- data/lib/aia/ruby_llm_adapter.rb +78 -10
- data/lib/aia/session.rb +187 -138
- data/lib/aia/ui_presenter.rb +7 -5
- data/lib/aia/utility.rb +26 -6
- data/lib/aia.rb +5 -1
- data/main.just +3 -25
- metadata +31 -12
- data/lib/aia/shell_command_executor.rb +0 -109
data/lib/aia/context_manager.rb
CHANGED

@@ -26,14 +26,9 @@ module AIA
     # @param system_prompt [String, nil] The system prompt to potentially prepend.
     # @return [Array<Hash>] The conversation context array.
     def get_context(system_prompt: nil)
-      #
-      if
-
-      (
-        @context.empty? ||
-        @context.first[:role] != 'system'
-      )
-        add_system_prompt(system_prompt)
+      # Add or replace system prompt if provided and not empty
+      if system_prompt && !system_prompt.strip.empty?
+        add_system_prompt(system_prompt)
       end
       @context
     end
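The net effect: `get_context` no longer checks whether a system message is already at the head of the context; any non-blank `system_prompt` argument now delegates straight to `add_system_prompt`, which adds or replaces the system message. A minimal sketch of the new call-site behavior (return values illustrative, assuming `add_system_prompt` keeps the system message at index 0):

```ruby
ctx = AIA::ContextManager.new
ctx.add_to_context(role: "user", content: "hello")

ctx.get_context(system_prompt: "You are terse.")
# => [{ role: "system", content: "You are terse." },
#     { role: "user",   content: "hello" }]

ctx.get_context(system_prompt: "")   # blank: context returned unchanged
ctx.get_context                      # no argument: context returned unchanged
```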
data/lib/aia/directive_processor.rb
CHANGED

@@ -162,19 +162,19 @@ module AIA
       spaces = " "*indent
       width = TTY::Screen.width - indent - 2

-      if
+      if AIA.config.tools.empty?
+        puts "No tools are available"
+      else
         puts
         puts "Available Tools"
         puts "==============="

-      AIA.config.tools.
-
-        puts "\n#{
-        puts "-"*
-        puts WordWrapper::MinimumRaggedness.new(width,
+        AIA.config.tools.each do |tool|
+          name = tool.respond_to?(:name) ? tool.name : tool.class.name
+          puts "\n#{name}"
+          puts "-"*name.size
+          puts WordWrapper::MinimumRaggedness.new(width, tool.description).wrap.split("\n").map{|s| spaces+s+"\n"}.join
         end
-      else
-        puts "No tools configured"
       end
       puts

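With no tools loaded, the listing now prints "No tools are available" (formerly "No tools configured"). With tools present, the rewritten branch would render each entry roughly like this (tool name and description here are hypothetical):

```
Available Tools
===============

edit_file
---------
  Replaces the first occurrence of a target string in a file with new
  content.
```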
@@ -260,7 +260,15 @@ module AIA

     desc "Shortcut for //config model _and_ //config model = value"
     def model(args, context_manager=nil)
-
+      if args.empty?
+        puts
+        puts AIA.config.client.model.to_h.pretty_inspect
+        puts
+      else
+        send(:config, args.prepend('model'), context_manager)
+      end
+
+      return ''
     end

     desc "Shortcut for //config temperature _and_ //config temperature = value"
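In a chat session the reworked directive now has two modes (hypothetical transcript; the model name is illustrative):

```
//model
# no arguments: pretty-prints the active model's config hash
# (AIA.config.client.model.to_h)

//model gpt-4o-mini
# with an argument: behaves like //config model = gpt-4o-mini
```

The trailing `return ''` keeps the directive from injecting any text of its own into the conversation context.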
@@ -345,9 +353,12 @@ module AIA
       counter = 0

       RubyLLM.models.all.each do |llm|
+        cw = llm.context_window
+        caps = llm.capabilities.join(',')
         inputs = llm.modalities.input.join(',')
         outputs = llm.modalities.output.join(',')
-
+        mode = "#{inputs} to #{outputs}"
+        entry = "- #{llm.id} (#{llm.provider}) cw: #{cw} mode: #{mode} caps: #{caps}"

         if query.nil? || query.empty?
           counter += 1
data/lib/aia/ruby_llm_adapter.rb
CHANGED

@@ -66,31 +66,81 @@ module AIA
       end
     end

+
     def setup_chat_with_tools
       begin
-        @chat
+        @chat = RubyLLM.chat(model: @model)
+        @model = @chat.model.name if @model.nil? # using default model
       rescue => e
         STDERR.puts "ERROR: #{e.message}"
         exit 1
       end

-
+      unless @chat.model.supports_functions?
+        AIA.config.tools = []
+        AIA.config.tool_names = ""
+        return
+      end
+
+      load_tools

-
-
-
+      @chat.with_tools(*tools) unless tools.empty?
+    end
+
+
+    def load_tools
+      @tools = []
+
+      support_local_tools
+      support_mcp
+      filter_tools_by_allowed_list
+      filter_tools_by_rejected_list
+      drop_duplicate_tools
+
+      if tools.empty?
+        AIA.config.tool_names = ""
+      else
+        AIA.config.tool_names = @tools.map(&:name).join(', ')
+        AIA.config.tools = @tools
       end
+    end
+

-
+    def support_local_tools
+      @tools += ObjectSpace.each_object(Class).select do |klass|
         klass < RubyLLM::Tool
       end
+    end
+

-
-
-
+    def support_mcp
+      RubyLLM::MCP.establish_connection
+      @tools += RubyLLM::MCP.tools
+    rescue => e
+      STDERR.puts "Warning: Failed to connect MCP clients: #{e.message}"
+    end
+
+
+    def drop_duplicate_tools
+      seen_names = Set.new
+      original_size = @tools.size
+
+      @tools.select! do |tool|
+        tool_name = tool.name
+        if seen_names.include?(tool_name)
+          STDERR.puts "WARNING: Duplicate tool name detected: '#{tool_name}'. Only the first occurrence will be used."
+          false
+        else
+          seen_names.add(tool_name)
+          true
+        end
       end
+
+      removed_count = original_size - @tools.size
+      STDERR.puts "Removed #{removed_count} duplicate tools" if removed_count > 0
     end

+
     # TODO: Need to rethink this dispatcher pattern w/r/t RubyLLM's capabilities
     # This code was originally designed for AiClient
     #
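`load_tools` establishes a small pipeline: collect local `RubyLLM::Tool` subclasses and MCP tools, apply the allow/reject filters, then drop duplicates. Local discovery works because any tool file required earlier in the run leaves its class visible to `ObjectSpace`. A standalone sketch with a hypothetical tool class (the `description`/`execute` DSL follows RubyLLM's tool conventions as I understand them):

```ruby
require 'ruby_llm'

# Hypothetical local tool; merely loading this file makes the class
# discoverable via ObjectSpace.each_object(Class).
class ReverseText < RubyLLM::Tool
  description "Reverses the text it is given"

  def execute(text:)
    text.reverse
  end
end

local_tools = ObjectSpace.each_object(Class).select { |klass| klass < RubyLLM::Tool }
local_tools.include?(ReverseText)  # => true
```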
@@ -117,7 +167,7 @@ module AIA
     end

     def transcribe(audio_file)
-      @chat.ask("Transcribe this audio", with: audio_file)
+      @chat.ask("Transcribe this audio", with: audio_file).content
     end

     def speak(text)
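`RubyLLM::Chat#ask` returns a message object rather than a plain string, so the added `.content` makes `transcribe` hand back the transcription text itself instead of the wrapper. Roughly (file path hypothetical, assuming RubyLLM's message API):

```ruby
require 'ruby_llm'

chat = RubyLLM.chat  # default model; illustrative
message = chat.ask("Transcribe this audio", with: "meeting.m4a")
message.content  # => the transcription text as a String, not the message object
```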
@@ -195,6 +245,24 @@ module AIA

     private

+    def filter_tools_by_allowed_list
+      return if AIA.config.allowed_tools.nil?
+
+      @tools.select! do |tool|
+        tool_name = tool.respond_to?(:name) ? tool.name : tool.class.name
+        AIA.config.allowed_tools.any? { |allowed| tool_name.include?(allowed) }
+      end
+    end
+
+    def filter_tools_by_rejected_list
+      return if AIA.config.rejected_tools.nil?
+
+      @tools.reject! do |tool|
+        tool_name = tool.respond_to?(:name) ? tool.name : tool.class.name
+        AIA.config.rejected_tools.any? { |rejected| tool_name.include?(rejected) }
+      end
+    end
+
     def extract_model_parts
       parts = AIA.config.model.split('/')
       parts.map!(&:strip)
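Both filters match by substring rather than exact name, so an `allowed_tools` entry of "file" keeps every tool whose name contains "file". A self-contained sketch of that semantics:

```ruby
tool_names = %w[read_file edit_file run_shell_command]

allowed = ["file"]                 # e.g. from the allowed_tools config value
tool_names.select { |name| allowed.any? { |a| name.include?(a) } }
# => ["read_file", "edit_file"]

rejected = ["shell"]               # e.g. from the rejected_tools config value
tool_names.reject { |name| rejected.any? { |r| name.include?(r) } }
# => ["read_file", "edit_file"]
```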
data/lib/aia/session.rb
CHANGED

@@ -22,26 +22,36 @@ module AIA

     def initialize(prompt_handler)
       @prompt_handler = prompt_handler
-      @chat_prompt_id = nil
-      @include_context_flag = true
+      @chat_prompt_id = nil
+      @include_context_flag = true
+
+      setup_prompt_and_history_manager
+      initialize_components
+      setup_output_file
+    end

+    def setup_prompt_and_history_manager
       # Special handling for chat mode with context files but no prompt ID
-      if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
+      if AIA.chat? && (AIA.config.prompt_id.nil? || AIA.config.prompt_id.empty?) && AIA.config.context_files && !AIA.config.context_files.empty?
         prompt_instance = nil
         @history_manager = nil
-      elsif AIA.chat? && AIA.config.prompt_id.empty?
+      elsif AIA.chat? && (AIA.config.prompt_id.nil? || AIA.config.prompt_id.empty?)
         prompt_instance = nil
         @history_manager = nil
       else
         prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
         @history_manager = HistoryManager.new(prompt: prompt_instance)
       end
+    end

+    def initialize_components
       @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
       @ui_presenter = UIPresenter.new
       @directive_processor = DirectiveProcessor.new
       @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
+    end

+    def setup_output_file
       if AIA.config.out_file && !AIA.config.out_file.nil? && !AIA.append? && File.exist?(AIA.config.out_file)
         File.open(AIA.config.out_file, "w") { } # Truncate the file
       end
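Beyond the method extraction, the substantive fix here is the `nil` guard: in 0.9.8, starting a chat without a prompt ID could call `.empty?` on `nil`. A quick illustration:

```ruby
# 0.9.8-style check: raises when prompt_id is nil
begin
  nil.empty?
rescue NoMethodError => e
  puts e.class  # => NoMethodError
end

# 0.9.10-style guard: short-circuits safely
prompt_id = nil
puts prompt_id.nil? || prompt_id.empty?  # => true, no exception
```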
@@ -95,37 +95,44 @@ module AIA

     # Process a single prompt with all its requirements
     def process_single_prompt(prompt_id)
-      role_id = AIA.config.role
-
       # Skip empty prompt IDs
       return if prompt_id.nil? || prompt_id.empty?

-
+      prompt = setup_prompt_processing(prompt_id)
+      return unless prompt
+
+      prompt_text = finalize_prompt_text(prompt)
+      send_prompt_and_get_response(prompt_text)
+    end
+
+    def setup_prompt_processing(prompt_id)
+      role_id = AIA.config.role
+
       begin
         prompt = @prompt_handler.get_prompt(prompt_id, role_id)
       rescue StandardError => e
         puts "Error processing prompt '#{prompt_id}': #{e.message}"
-        return
+        return nil
       end

-      # Collect variable values (only when flag is true)
       if @include_context_flag
         collect_variable_values(prompt)
         enhance_prompt_with_extras(prompt)
       end

-
+      prompt
+    end
+
+    def finalize_prompt_text(prompt)
       prompt_text = prompt.to_s

-      # Add context files (only when flag is true)
       if @include_context_flag
         prompt_text = add_context_files(prompt_text)
         # SMELL: TODO? empty the AIA.config.context_files array
         @include_context_flag = false
       end

-
-      send_prompt_and_get_response(prompt_text)
+      prompt_text
     end

     # Collect variable values from user input
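The `@include_context_flag` makes variable collection and context-file injection one-shot: only the first prompt in a batch run gets them, later prompts in a pipeline do not. A reduced illustration of the flag's effect:

```ruby
include_context = true

["first prompt", "second prompt"].each do |text|
  if include_context
    text = "<context files>\n" + text
    include_context = false   # later prompts skip the context files
  end
  puts text
end
# => "<context files>\nfirst prompt"
# => "second prompt"
```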
@@ -144,17 +161,19 @@ module AIA
           history_values: history,
         )

-
-        history.delete(value) if history.include?(value)
-        history << value
-        history.shift if history.size > HistoryManager::MAX_VARIABLE_HISTORY
-
-        variable_values[var_name] = history
+        variable_values[var_name] = update_variable_history(history, value)
       end

       prompt.parameters = variable_values
     end

+    def update_variable_history(history, value)
+      history.delete(value) if history.include?(value)
+      history << value
+      history.shift if history.size > HistoryManager::MAX_VARIABLE_HISTORY
+      history
+    end
+
     # Add terse instructions, stdin content, and executable prompt file content
     def enhance_prompt_with_extras(prompt)
       # Add terse instruction if needed
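The extracted helper implements a small most-recent-last history: re-entered values move to the end, and the list is capped at `HistoryManager::MAX_VARIABLE_HISTORY`. Standalone sketch (the cap value is assumed for illustration):

```ruby
MAX_VARIABLE_HISTORY = 5  # assumed cap for this sketch

def update_variable_history(history, value, max = MAX_VARIABLE_HISTORY)
  history.delete(value) if history.include?(value)
  history << value
  history.shift if history.size > max
  history
end

update_variable_history(%w[a b c], "b")            # => ["a", "c", "b"]
update_variable_history(%w[v1 v2 v3 v4 v5], "v6")  # => ["v2", "v3", "v4", "v5", "v6"]
```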
@@ -210,157 +229,187 @@ module AIA
     # NOTE: there could have been an initial prompt sent into this session
     # via a prompt_id on the command line, piped in text, or context files.
     def start_chat(skip_context_files: false)
+      setup_chat_session
+      process_initial_context(skip_context_files)
+      handle_piped_input
+      run_chat_loop
+    ensure
+      @ui_presenter.display_chat_end
+    end
+
+    private
+
+    def setup_chat_session
+      initialize_chat_ui
+      @chat_prompt_id = generate_chat_prompt_id
+      create_temporary_prompt
+      setup_signal_handlers
+      create_chat_prompt_object
+      Reline::HISTORY.clear
+    end
+
+    def initialize_chat_ui
       puts "\nEntering interactive chat mode..."
       @ui_presenter.display_chat_header
+    end

-
+    def generate_chat_prompt_id
       now = Time.now
-
+      "chat_#{now.strftime("%Y%m%d_%H%M%S")}"
+    end

-
-
-
-
-
-
-
+    def create_temporary_prompt
+      now = Time.now
+      PromptManager::Prompt.create(
+        id: @chat_prompt_id,
+        text: "Today's date is #{now.strftime("%Y-%m-%d")} and the current time is #{now.strftime("%H:%M:%S")}",
+      )
+    end

-
-
-
-
-
-
-
-
-      }
-
-      # Access this chat session's prompt object in order to do the dynamic things
-      # in follow up prompts that can be done in the batch mode like shell substitution. etc.
-      @chat_prompt = PromptManager::Prompt.new(
-        id: @chat_prompt_id,
-        directives_processor: @directive_processor,
-        erb_flag: true,
-        envar_flag: true,
-        external_binding: binding,
-      )
+    def setup_signal_handlers
+      session_instance = self
+      at_exit { session_instance.send(:cleanup_chat_prompt) }
+      Signal.trap("INT") {
+        session_instance.send(:cleanup_chat_prompt)
+        exit
+      }
+    end

-
+    def create_chat_prompt_object
+      @chat_prompt = PromptManager::Prompt.new(
+        id: @chat_prompt_id,
+        directives_processor: @directive_processor,
+        erb_flag: true,
+        envar_flag: true,
+        external_binding: binding,
+      )
+    end

-
-
-      context = AIA.config.context_files.map do |file|
-        File.read(file) rescue "Error reading file: #{file}"
-      end.join("\n\n")
+    def process_initial_context(skip_context_files)
+      return if skip_context_files || !AIA.config.context_files || AIA.config.context_files.empty?

-
-
-
+      context = AIA.config.context_files.map do |file|
+        File.read(file) rescue "Error reading file: #{file}"
+      end.join("\n\n")

-
-      @ui_presenter.display_thinking_animation
-      response = @chat_processor.process_prompt(@context_manager.get_context)
+      return if context.empty?

-
-
+      # Add context files content to context
+      @context_manager.add_to_context(role: "user", content: context)

-
-
-
-
-
-
+      # Process the context
+      @ui_presenter.display_thinking_animation
+      response = @chat_processor.process_prompt(@context_manager.get_context)
+
+      # Add AI response to context
+      @context_manager.add_to_context(role: "assistant", content: response)
+
+      # Output the response
+      @chat_processor.output_response(response)
+      @chat_processor.speak(response)
+      @ui_presenter.display_separator
+    end

-
-
-      original_stdin = STDIN.dup
-      piped_input = STDIN.read.strip
-      STDIN.reopen("/dev/tty")
+    def handle_piped_input
+      return if STDIN.tty?

-
-
-
+      original_stdin = STDIN.dup
+      piped_input = STDIN.read.strip
+      STDIN.reopen("/dev/tty")

-
+      return if piped_input.empty?

-
-
+      @chat_prompt.text = piped_input
+      processed_input = @chat_prompt.to_s
+
+      @context_manager.add_to_context(role: "user", content: processed_input)
+
+      @ui_presenter.display_thinking_animation
+      response = @chat_processor.process_prompt(@context_manager.get_context)
+
+      @context_manager.add_to_context(role: "assistant", content: response)
+      @chat_processor.output_response(response)
+      @chat_processor.speak(response) if AIA.speak?
+      @ui_presenter.display_separator
+
+      STDIN.reopen(original_stdin)
+    end

-
-
-
-
+    def run_chat_loop
+      loop do
+        follow_up_prompt = @ui_presenter.ask_question
+
+        break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == "exit" || follow_up_prompt.strip.empty?
+
+        if AIA.config.out_file
+          File.open(AIA.config.out_file, "a") do |file|
+            file.puts "\nYou: #{follow_up_prompt}"
           end
+        end

-
+        if @directive_processor.directive?(follow_up_prompt)
+          follow_up_prompt = process_chat_directive(follow_up_prompt)
+          next if follow_up_prompt.nil?
         end

-
-
-        follow_up_prompt = @ui_presenter.ask_question
+        @chat_prompt.text = follow_up_prompt
+        processed_prompt = @chat_prompt.to_s

-
+        @context_manager.add_to_context(role: "user", content: processed_prompt)
+        conversation = @context_manager.get_context

-
-
-            file.puts "\nYou: #{follow_up_prompt}"
-          end
-        end
+        @ui_presenter.display_thinking_animation
+        response = @chat_processor.process_prompt(conversation)

-
-
-
-
-
-
-
-          # First, clear the context manager's context
-          @context_manager.clear_context(keep_system_prompt: true)
-
-          # Second, try clearing the client's context
-          if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
-            AIA.config.client.clear_context
-          end
-
-          # Third, completely reinitialize the client to ensure fresh state
-          # This is the most aggressive approach to ensure no context remains
-          begin
-            AIA.config.client = AIA::RubyLLMAdapter.new
-          rescue => e
-            STDERR.puts "Error reinitializing client: #{e.message}"
-          end
-
-          @ui_presenter.display_info("Chat context cleared.")
-          next
-        elsif directive_output.nil? || directive_output.strip.empty?
-          next
-        else
-          puts "\n#{directive_output}\n"
-          follow_up_prompt = "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
-        end
+        @ui_presenter.display_ai_response(response)
+        @context_manager.add_to_context(role: "assistant", content: response)
+        @chat_processor.speak(response)
+
+        @ui_presenter.display_separator
+      end
+    end

-
-
+    def process_chat_directive(follow_up_prompt)
+      directive_output = @directive_processor.process(follow_up_prompt, @context_manager)
+
+      return handle_clear_directive if follow_up_prompt.strip.start_with?("//clear")
+      return handle_empty_directive_output if directive_output.nil? || directive_output.strip.empty?
+
+      handle_successful_directive(follow_up_prompt, directive_output)
+    end

-
-
+    def handle_clear_directive
+      # The directive processor has called context_manager.clear_context
+      # but we need a more aggressive approach to fully clear all context

-
-
+      # First, clear the context manager's context
+      @context_manager.clear_context(keep_system_prompt: true)

-
-
-
+      # Second, try clearing the client's context
+      if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
+        AIA.config.client.clear_context
+      end

-
-
-
-
+      # Third, completely reinitialize the client to ensure fresh state
+      # This is the most aggressive approach to ensure no context remains
+      begin
+        AIA.config.client = AIA::RubyLLMAdapter.new
+      rescue => e
+        STDERR.puts "Error reinitializing client: #{e.message}"
       end
+
+      @ui_presenter.display_info("Chat context cleared.")
+      nil
     end

-
+    def handle_empty_directive_output
+      nil
+    end
+
+    def handle_successful_directive(follow_up_prompt, directive_output)
+      puts "\n#{directive_output}\n"
+      "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
+    end

     def cleanup_chat_prompt
       if @chat_prompt_id
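The chat-loop refactor gives directives a simple contract: `process_chat_directive` returns `nil` when nothing should be sent to the model (a `//clear`, or a directive with no output) and a synthesized follow-up string otherwise, which the loop forwards to the LLM. A reduced, standalone sketch of that contract (collaborators stubbed out):

```ruby
# Hypothetical stand-in showing the return contract of process_chat_directive.
def process_chat_directive(prompt, output)
  return nil if prompt.strip.start_with?("//clear")   # context already reset
  return nil if output.nil? || output.strip.empty?    # nothing worth sending

  "I executed this directive: #{prompt}\nHere's the output: #{output}\nLet's continue our conversation."
end

process_chat_directive("//clear", "")       # => nil, loop skips the model call
process_chat_directive("//ruby 1+1", "2")   # => synthesized prompt for the model
```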
data/lib/aia/ui_presenter.rb
CHANGED

@@ -54,9 +54,9 @@ module AIA
       line = line.chomp

       # Check for code block delimiters
-      if line.match
+      if (match = line.match(/^```(\w*)$/)) && !in_code_block
         in_code_block = true
-        language =
+        language = match[1]
         output.puts "#{indent}```#{language}"
       elsif line.match?(/^```$/) && in_code_block
         in_code_block = false
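The new pattern captures the info string after the opening fence and only flips `in_code_block` when a block is not already open. A quick check of the regex:

```ruby
"```ruby".match(/^```(\w*)$/)[1]  # => "ruby"
"```".match(/^```(\w*)$/)[1]      # => ""   (bare fence, language defaults to empty)
"a ``` b".match(/^```(\w*)$/)     # => nil  (fences must be the whole line)
```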
@@ -107,9 +107,11 @@ module AIA
       spinner = TTY::Spinner.new("[:spinner] #{spinner_message}", format: :bouncing_ball)
       spinner.auto_spin

-
-
-
+      begin
+        result = yield
+      ensure
+        spinner.stop
+      end
       result
     else
       yield
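The `ensure` guarantees the spinner thread stops even when the yielded block raises; previously an exception could leave it animating over the error output. A self-contained sketch of the pattern (method name and message are illustrative):

```ruby
require 'tty-spinner'

def with_spinner(message)
  spinner = TTY::Spinner.new("[:spinner] #{message}", format: :bouncing_ball)
  spinner.auto_spin
  begin
    yield
  ensure
    spinner.stop  # runs even when the block raises
  end
end

with_spinner("thinking") { raise "boom" } rescue nil
# The spinner is stopped before the exception propagates.
```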