aia 0.9.7 → 0.9.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.config/tocer/configuration.yml +2 -1
- data/.version +1 -1
- data/CHANGELOG.md +15 -1
- data/README.md +43 -0
- data/Rakefile +16 -8
- data/examples/directives/ask.rb +21 -0
- data/examples/tools/edit_file.rb +2 -0
- data/examples/tools/incomplete/calculator_tool.rb +70 -0
- data/examples/tools/incomplete/composite_analysis_tool.rb +89 -0
- data/examples/tools/incomplete/data_science_kit.rb +128 -0
- data/examples/tools/incomplete/database_query_tool.rb +100 -0
- data/examples/tools/incomplete/devops_toolkit.rb +112 -0
- data/examples/tools/incomplete/error_handling_tool.rb +109 -0
- data/examples/tools/incomplete/pdf_page_reader.rb +32 -0
- data/examples/tools/incomplete/secure_tool_template.rb +117 -0
- data/examples/tools/incomplete/weather_tool.rb +110 -0
- data/examples/tools/incomplete/workflow_manager_tool.rb +145 -0
- data/examples/tools/list_files.rb +2 -0
- data/examples/tools/mcp/README.md +1 -0
- data/examples/tools/mcp/github_mcp_server.rb +41 -0
- data/examples/tools/mcp/imcp.rb +15 -0
- data/examples/tools/read_file.rb +2 -0
- data/examples/tools/run_shell_command.rb +2 -0
- data/lib/aia/chat_processor_service.rb +3 -26
- data/lib/aia/config.rb +542 -414
- data/lib/aia/context_manager.rb +3 -8
- data/lib/aia/directive_processor.rb +24 -11
- data/lib/aia/ruby_llm_adapter.rb +78 -10
- data/lib/aia/session.rb +313 -215
- data/lib/aia/ui_presenter.rb +7 -5
- data/lib/aia/utility.rb +26 -6
- data/lib/aia.rb +5 -1
- metadata +32 -12
- data/lib/aia/shell_command_executor.rb +0 -109
data/lib/aia/session.rb
CHANGED

````diff
@@ -1,158 +1,227 @@
 # lib/aia/session.rb
 
-require 'tty-spinner'
-require 'tty-screen'
-require 'reline'
-require 'prompt_manager'
-require 'json'
-require 'fileutils'
-require 'amazing_print'
-require_relative 'directive_processor'
-require_relative 'history_manager'
-require_relative 'context_manager'
-require_relative 'ui_presenter'
-require_relative 'chat_processor_service'
-require_relative 'prompt_handler'
-require_relative 'utility'
+require "tty-spinner"
+require "tty-screen"
+require "reline"
+require "prompt_manager"
+require "json"
+require "fileutils"
+require "amazing_print"
+require_relative "directive_processor"
+require_relative "history_manager"
+require_relative "context_manager"
+require_relative "ui_presenter"
+require_relative "chat_processor_service"
+require_relative "prompt_handler"
+require_relative "utility"
 
 module AIA
   class Session
     KW_HISTORY_MAX = 5 # Maximum number of history entries per keyword
-    TERSE_PROMPT
+    TERSE_PROMPT = "\nKeep your response short and to the point.\n"
 
     def initialize(prompt_handler)
-      @prompt_handler
-      @chat_prompt_id = nil
+      @prompt_handler = prompt_handler
+      @chat_prompt_id = nil
+      @include_context_flag = true
+
+      setup_prompt_and_history_manager
+      initialize_components
+      setup_output_file
+    end
 
+    def setup_prompt_and_history_manager
       # Special handling for chat mode with context files but no prompt ID
-      if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
-        prompt_instance
+      if AIA.chat? && (AIA.config.prompt_id.nil? || AIA.config.prompt_id.empty?) && AIA.config.context_files && !AIA.config.context_files.empty?
+        prompt_instance = nil
         @history_manager = nil
-      elsif AIA.chat? && AIA.config.prompt_id.empty?
-        prompt_instance
+      elsif AIA.chat? && (AIA.config.prompt_id.nil? || AIA.config.prompt_id.empty?)
+        prompt_instance = nil
         @history_manager = nil
       else
-        prompt_instance
+        prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
         @history_manager = HistoryManager.new(prompt: prompt_instance)
       end
+    end
 
-
-      @
+    def initialize_components
+      @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
+      @ui_presenter = UIPresenter.new
       @directive_processor = DirectiveProcessor.new
-      @chat_processor
+      @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
+    end
 
+    def setup_output_file
       if AIA.config.out_file && !AIA.config.out_file.nil? && !AIA.append? && File.exist?(AIA.config.out_file)
-        File.open(AIA.config.out_file,
+        File.open(AIA.config.out_file, "w") { } # Truncate the file
       end
     end
 
-    # Starts the session, processing the
-    #
+    # Starts the session, processing all prompts in the pipeline and then
+    # optionally starting an interactive chat session.
     def start
-
-
+      # Handle special chat-only cases first
+      if should_start_chat_immediately?
+        AIA::Utility.robot
+        start_chat
+        return
+      end
+
+      # Process all prompts in the pipeline
+      process_all_prompts
 
-      #
+      # Start chat mode after all prompts are processed
       if AIA.chat?
         AIA::Utility.robot
-
-
-        start_chat
-        return
-      elsif prompt_id.empty? && role_id.empty?
-        # Even with an empty prompt_id, we might have context files
-        start_chat
-        return
-      end
+        @ui_presenter.display_separator
+        start_chat(skip_context_files: true)
       end
+    end
+
+    private
+
+    # Check if we should start chat immediately without processing any prompts
+    def should_start_chat_immediately?
+      return false unless AIA.chat?
 
+      # If pipeline is empty or only contains empty prompt_ids, go straight to chat
+      AIA.config.pipeline.empty? || AIA.config.pipeline.all? { |id| id.nil? || id.empty? }
+    end
+
+    # Process all prompts in the pipeline sequentially
+    def process_all_prompts
+      prompt_count = 0
+      total_prompts = AIA.config.pipeline.size
+
+      until AIA.config.pipeline.empty?
+        prompt_count += 1
+        prompt_id = AIA.config.pipeline.shift
+
+        puts "\n--- Processing prompt #{prompt_count}/#{total_prompts}: #{prompt_id} ---" if AIA.verbose? && total_prompts > 1
+
+        process_single_prompt(prompt_id)
+      end
+    end
+
+    # Process a single prompt with all its requirements
+    def process_single_prompt(prompt_id)
+      # Skip empty prompt IDs
+      return if prompt_id.nil? || prompt_id.empty?
+
+      prompt = setup_prompt_processing(prompt_id)
+      return unless prompt
+
+      prompt_text = finalize_prompt_text(prompt)
+      send_prompt_and_get_response(prompt_text)
+    end
+
+    def setup_prompt_processing(prompt_id)
+      role_id = AIA.config.role
 
-      # --- Get and process the initial prompt ---
       begin
         prompt = @prompt_handler.get_prompt(prompt_id, role_id)
       rescue StandardError => e
-        puts "Error: #{e.message}"
-        return
+        puts "Error processing prompt '#{prompt_id}': #{e.message}"
+        return nil
       end
 
-
+      if @include_context_flag
+        collect_variable_values(prompt)
+        enhance_prompt_with_extras(prompt)
+      end
+
+      prompt
+    end
+
+    def finalize_prompt_text(prompt)
+      prompt_text = prompt.to_s
+
+      if @include_context_flag
+        prompt_text = add_context_files(prompt_text)
+        # SMELL: TODO? empty the AIA.config.context_files array
+        @include_context_flag = false
+      end
+
+      prompt_text
+    end
+
+    # Collect variable values from user input
+    def collect_variable_values(prompt)
       variables = prompt.parameters.keys
+      return if variables.nil? || variables.empty?
 
-
-
-      history_manager = AIA::HistoryManager.new prompt: prompt
-
-      variables.each do |var_name|
-        # History is based on the prompt ID and the variable name (without brackets)
-        history = prompt.parameters[var_name]
-
-        # Ask the user for the variable
-        value = history_manager.request_variable_value(
-          variable_name: var_name,
-          history_values: history
-        )
-        # Store the value using the original BRACKETED key from prompt.parameters
-        if history.include? value
-          history.delete(value)
-        end
-        history << value
-        if history.size > HistoryManager::MAX_VARIABLE_HISTORY
-          history.shift
-        end
-        variable_values[var_name] = history
-      end
+      variable_values = {}
+      history_manager = AIA::HistoryManager.new prompt: prompt
 
-
-      prompt.parameters
+      variables.each do |var_name|
+        history = prompt.parameters[var_name]
+
+        value = history_manager.request_variable_value(
+          variable_name: var_name,
+          history_values: history,
+        )
+
+        variable_values[var_name] = update_variable_history(history, value)
       end
 
+      prompt.parameters = variable_values
+    end
+
+    def update_variable_history(history, value)
+      history.delete(value) if history.include?(value)
+      history << value
+      history.shift if history.size > HistoryManager::MAX_VARIABLE_HISTORY
+      history
+    end
+
+    # Add terse instructions, stdin content, and executable prompt file content
+    def enhance_prompt_with_extras(prompt)
       # Add terse instruction if needed
-      if AIA.terse?
-        prompt.text << TERSE_PROMPT
-      end
+      prompt.text << TERSE_PROMPT if AIA.terse?
 
+      # Add STDIN content
       if AIA.config.stdin_content && !AIA.config.stdin_content.strip.empty?
         prompt.text << "\n\n" << AIA.config.stdin_content
       end
 
+      # Add executable prompt file content
       if AIA.config.executable_prompt_file
         prompt.text << "\n\n" << File.read(AIA.config.executable_prompt_file)
-
-
+                                     .lines[1..]
+                                     .join
       end
+    end
 
-
-
+    # Add context files to prompt text
+    def add_context_files(prompt_text)
+      return prompt_text unless AIA.config.context_files && !AIA.config.context_files.empty?
 
-
-
-
-
-
-
-    end
+      context = AIA.config.context_files.map do |file|
+        File.read(file) rescue "Error reading file: #{file}"
+      end.join("\n\n")
+
+      "#{prompt_text}\n\nContext:\n#{context}"
+    end
 
-
-
+    # Send prompt to AI and handle the response
+    def send_prompt_and_get_response(prompt_text)
+      # Add prompt to conversation context
+      @context_manager.add_to_context(role: "user", content: prompt_text)
 
-      # Process the
+      # Process the prompt
       @ui_presenter.display_thinking_animation
-      # Send the current context (which includes the user prompt)
       response = @chat_processor.process_prompt(@context_manager.get_context)
 
       # Add AI response to context
-      @context_manager.add_to_context(role:
+      @context_manager.add_to_context(role: "assistant", content: response)
 
       # Output the response
-      @chat_processor.output_response(response)
-
-      # Process next prompts/pipeline (if any)
-      @chat_processor.process_next_prompts(response, @prompt_handler)
+      @chat_processor.output_response(response)
 
-      #
-      if
-      @
-
+      # Process any directives in the response
+      if @directive_processor.directive?(response)
+        directive_result = @directive_processor.process(response, @context_manager)
+        puts "\nDirective output: #{directive_result}" if directive_result && !directive_result.strip.empty?
       end
     end
 
@@ -160,158 +229,187 @@ module AIA
     # NOTE: there could have been an initial prompt sent into this session
     # via a prompt_id on the command line, piped in text, or context files.
     def start_chat(skip_context_files: false)
+      setup_chat_session
+      process_initial_context(skip_context_files)
+      handle_piped_input
+      run_chat_loop
+    ensure
+      @ui_presenter.display_chat_end
+    end
+
+    private
+
+    def setup_chat_session
+      initialize_chat_ui
+      @chat_prompt_id = generate_chat_prompt_id
+      create_temporary_prompt
+      setup_signal_handlers
+      create_chat_prompt_object
+      Reline::HISTORY.clear
+    end
+
+    def initialize_chat_ui
       puts "\nEntering interactive chat mode..."
       @ui_presenter.display_chat_header
+    end
 
-
+    def generate_chat_prompt_id
       now = Time.now
-
+      "chat_#{now.strftime("%Y%m%d_%H%M%S")}"
+    end
 
-
-
-
-
-
-
-
+    def create_temporary_prompt
+      now = Time.now
+      PromptManager::Prompt.create(
+        id: @chat_prompt_id,
+        text: "Today's date is #{now.strftime("%Y-%m-%d")} and the current time is #{now.strftime("%H:%M:%S")}",
+      )
+    end
 
-
-
-
-
-
-
-
-
-      }
-
-      # Access this chat session's prompt object in order to do the dynamic things
-      # in follow up prompts that can be done in the batch mode like shell substitution. etc.
-      @chat_prompt = PromptManager::Prompt.new(
-        id: @chat_prompt_id,
-        directives_processor: @directive_processor,
-        erb_flag: true,
-        envar_flag: true,
-        external_binding: binding,
-      )
+    def setup_signal_handlers
+      session_instance = self
+      at_exit { session_instance.send(:cleanup_chat_prompt) }
+      Signal.trap("INT") {
+        session_instance.send(:cleanup_chat_prompt)
+        exit
+      }
+    end
 
-
+    def create_chat_prompt_object
+      @chat_prompt = PromptManager::Prompt.new(
+        id: @chat_prompt_id,
+        directives_processor: @directive_processor,
+        erb_flag: true,
+        envar_flag: true,
+        external_binding: binding,
+      )
+    end
 
-
-
-      context = AIA.config.context_files.map do |file|
-        File.read(file) rescue "Error reading file: #{file}"
-      end.join("\n\n")
+    def process_initial_context(skip_context_files)
+      return if skip_context_files || !AIA.config.context_files || AIA.config.context_files.empty?
 
-
-
-
+      context = AIA.config.context_files.map do |file|
+        File.read(file) rescue "Error reading file: #{file}"
+      end.join("\n\n")
 
-
-      @ui_presenter.display_thinking_animation
-      response = @chat_processor.process_prompt(@context_manager.get_context)
+      return if context.empty?
 
-
-
+      # Add context files content to context
+      @context_manager.add_to_context(role: "user", content: context)
 
-
-
-
-
-
-
+      # Process the context
+      @ui_presenter.display_thinking_animation
+      response = @chat_processor.process_prompt(@context_manager.get_context)
+
+      # Add AI response to context
+      @context_manager.add_to_context(role: "assistant", content: response)
+
+      # Output the response
+      @chat_processor.output_response(response)
+      @chat_processor.speak(response)
+      @ui_presenter.display_separator
+    end
+
+    def handle_piped_input
+      return if STDIN.tty?
+
+      original_stdin = STDIN.dup
+      piped_input = STDIN.read.strip
+      STDIN.reopen("/dev/tty")
 
-
-      if !STDIN.tty?
-        original_stdin = STDIN.dup
-        piped_input = STDIN.read.strip
-        STDIN.reopen('/dev/tty')
+      return if piped_input.empty?
 
-
-
-        processed_input = @chat_prompt.to_s
+      @chat_prompt.text = piped_input
+      processed_input = @chat_prompt.to_s
 
-
+      @context_manager.add_to_context(role: "user", content: processed_input)
+
+      @ui_presenter.display_thinking_animation
+      response = @chat_processor.process_prompt(@context_manager.get_context)
 
-
-
+      @context_manager.add_to_context(role: "assistant", content: response)
+      @chat_processor.output_response(response)
+      @chat_processor.speak(response) if AIA.speak?
+      @ui_presenter.display_separator
 
-
-
-
-
+      STDIN.reopen(original_stdin)
+    end
+
+    def run_chat_loop
+      loop do
+        follow_up_prompt = @ui_presenter.ask_question
+
+        break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == "exit" || follow_up_prompt.strip.empty?
+
+        if AIA.config.out_file
+          File.open(AIA.config.out_file, "a") do |file|
+            file.puts "\nYou: #{follow_up_prompt}"
           end
+        end
 
-
+        if @directive_processor.directive?(follow_up_prompt)
+          follow_up_prompt = process_chat_directive(follow_up_prompt)
+          next if follow_up_prompt.nil?
         end
 
-
-
-        follow_up_prompt = @ui_presenter.ask_question
+        @chat_prompt.text = follow_up_prompt
+        processed_prompt = @chat_prompt.to_s
 
-
+        @context_manager.add_to_context(role: "user", content: processed_prompt)
+        conversation = @context_manager.get_context
 
-
-
-            file.puts "\nYou: #{follow_up_prompt}"
-          end
-        end
+        @ui_presenter.display_thinking_animation
+        response = @chat_processor.process_prompt(conversation)
 
-
-
-
-        if follow_up_prompt.strip.start_with?('//clear')
-          # The directive processor has called context_manager.clear_context
-          # but we need a more aggressive approach to fully clear all context
-
-          # First, clear the context manager's context
-          @context_manager.clear_context(keep_system_prompt: true)
-
-          # Second, try clearing the client's context
-          if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
-            AIA.config.client.clear_context
-          end
-
-          # Third, completely reinitialize the client to ensure fresh state
-          # This is the most aggressive approach to ensure no context remains
-          begin
-            AIA.config.client = AIA::RubyLLMAdapter.new
-          rescue => e
-            STDERR.puts "Error reinitializing client: #{e.message}"
-          end
-
-          @ui_presenter.display_info("Chat context cleared.")
-          next
-        elsif directive_output.nil? || directive_output.strip.empty?
-          next
-        else
-          puts "\n#{directive_output}\n"
-          follow_up_prompt = "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
-        end
-      end
+        @ui_presenter.display_ai_response(response)
+        @context_manager.add_to_context(role: "assistant", content: response)
+        @chat_processor.speak(response)
 
-
-
+        @ui_presenter.display_separator
+      end
+    end
 
-
-
+    def process_chat_directive(follow_up_prompt)
+      directive_output = @directive_processor.process(follow_up_prompt, @context_manager)
+
+      return handle_clear_directive if follow_up_prompt.strip.start_with?("//clear")
+      return handle_empty_directive_output if directive_output.nil? || directive_output.strip.empty?
+
+      handle_successful_directive(follow_up_prompt, directive_output)
+    end
 
-
-
+    def handle_clear_directive
+      # The directive processor has called context_manager.clear_context
+      # but we need a more aggressive approach to fully clear all context
 
-
-
-      @chat_processor.speak(response)
+      # First, clear the context manager's context
+      @context_manager.clear_context(keep_system_prompt: true)
 
-
-
+      # Second, try clearing the client's context
+      if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
+        AIA.config.client.clear_context
+      end
 
-    ensure
-
+      # Third, completely reinitialize the client to ensure fresh state
+      # This is the most aggressive approach to ensure no context remains
+      begin
+        AIA.config.client = AIA::RubyLLMAdapter.new
+      rescue => e
+        STDERR.puts "Error reinitializing client: #{e.message}"
      end
+
+      @ui_presenter.display_info("Chat context cleared.")
+      nil
     end
 
-
+    def handle_empty_directive_output
+      nil
+    end
+
+    def handle_successful_directive(follow_up_prompt, directive_output)
+      puts "\n#{directive_output}\n"
+      "I executed this directive: #{follow_up_prompt}\nHere's the output: #{directive_output}\nLet's continue our conversation."
+    end
 
     def cleanup_chat_prompt
       if @chat_prompt_id
````
data/lib/aia/ui_presenter.rb
CHANGED

````diff
@@ -54,9 +54,9 @@ module AIA
         line = line.chomp
 
         # Check for code block delimiters
-        if line.match
+        if (match = line.match(/^```(\w*)$/)) && !in_code_block
           in_code_block = true
-          language =
+          language = match[1]
           output.puts "#{indent}```#{language}"
         elsif line.match?(/^```$/) && in_code_block
           in_code_block = false
@@ -107,9 +107,11 @@ module AIA
       spinner = TTY::Spinner.new("[:spinner] #{spinner_message}", format: :bouncing_ball)
       spinner.auto_spin
 
-
-
-
+      begin
+        result = yield
+      ensure
+        spinner.stop
+      end
       result
     else
       yield
````