aia 0.9.6 → 0.9.8

This diff shows the changes between publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
data/lib/aia/session.rb CHANGED
@@ -1,149 +1,208 @@
1
1
  # lib/aia/session.rb
2
2
 
3
- require 'tty-spinner'
4
- require 'tty-screen'
5
- require 'reline'
6
- require 'prompt_manager'
7
- require 'json'
8
- require 'fileutils'
9
- require 'amazing_print'
10
- require_relative 'directive_processor'
11
- require_relative 'history_manager'
12
- require_relative 'context_manager'
13
- require_relative 'ui_presenter'
14
- require_relative 'chat_processor_service'
15
- require_relative 'prompt_handler'
16
- require_relative 'utility'
3
+ require "tty-spinner"
4
+ require "tty-screen"
5
+ require "reline"
6
+ require "prompt_manager"
7
+ require "json"
8
+ require "fileutils"
9
+ require "amazing_print"
10
+ require_relative "directive_processor"
11
+ require_relative "history_manager"
12
+ require_relative "context_manager"
13
+ require_relative "ui_presenter"
14
+ require_relative "chat_processor_service"
15
+ require_relative "prompt_handler"
16
+ require_relative "utility"
17
17
 
18
18
  module AIA
19
19
  class Session
20
20
  KW_HISTORY_MAX = 5 # Maximum number of history entries per keyword
21
- TERSE_PROMPT = "\nKeep your response short and to the point.\n"
21
+ TERSE_PROMPT = "\nKeep your response short and to the point.\n"
22
22
 
23
23
  def initialize(prompt_handler)
24
- @prompt_handler = prompt_handler
24
+ @prompt_handler = prompt_handler
25
25
  @chat_prompt_id = nil # Initialize to nil
26
+ @include_context_flag = true # Flag to include context files, stdin, etc. only once
26
27
 
27
28
  # Special handling for chat mode with context files but no prompt ID
28
29
  if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
29
- prompt_instance = nil
30
+ prompt_instance = nil
30
31
  @history_manager = nil
31
32
  elsif AIA.chat? && AIA.config.prompt_id.empty?
32
- prompt_instance = nil
33
+ prompt_instance = nil
33
34
  @history_manager = nil
34
35
  else
35
- prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
36
+ prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
36
37
  @history_manager = HistoryManager.new(prompt: prompt_instance)
37
38
  end
38
39
 
39
- @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
40
- @ui_presenter = UIPresenter.new
40
+ @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
41
+ @ui_presenter = UIPresenter.new
41
42
  @directive_processor = DirectiveProcessor.new
42
- @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
43
+ @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
43
44
 
44
- if AIA.config.out_file && !AIA.append? && File.exist?(AIA.config.out_file)
45
- File.open(AIA.config.out_file, 'w') {} # Truncate the file
45
+ if AIA.config.out_file && !AIA.config.out_file.nil? && !AIA.append? && File.exist?(AIA.config.out_file)
46
+ File.open(AIA.config.out_file, "w") { } # Truncate the file
46
47
  end
47
48
  end
48
49
 
49
- # Starts the session, processing the initial prompt and handling user
50
- # interactions. It manages the flow of prompts, context, and responses.
50
+ # Starts the session, processing all prompts in the pipeline and then
51
+ # optionally starting an interactive chat session.
51
52
  def start
52
- prompt_id = AIA.config.prompt_id
53
- role_id = AIA.config.role
53
+ # Handle special chat-only cases first
54
+ if should_start_chat_immediately?
55
+ AIA::Utility.robot
56
+ start_chat
57
+ return
58
+ end
54
59
 
55
- # Handle chat mode
60
+ # Process all prompts in the pipeline
61
+ process_all_prompts
62
+
63
+ # Start chat mode after all prompts are processed
56
64
  if AIA.chat?
57
65
  AIA::Utility.robot
58
- # If we're in chat mode with only context files, go straight to chat
59
- if prompt_id.empty? && role_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
60
- start_chat
61
- return
62
- elsif prompt_id.empty? && role_id.empty?
63
- # Even with an empty prompt_id, we might have context files
64
- start_chat
65
- return
66
- end
66
+ @ui_presenter.display_separator
67
+ start_chat(skip_context_files: true)
68
+ end
69
+ end
70
+
71
+ private
72
+
73
+ # Check if we should start chat immediately without processing any prompts
74
+ def should_start_chat_immediately?
75
+ return false unless AIA.chat?
76
+
77
+ # If pipeline is empty or only contains empty prompt_ids, go straight to chat
78
+ AIA.config.pipeline.empty? || AIA.config.pipeline.all? { |id| id.nil? || id.empty? }
79
+ end
80
+
81
+ # Process all prompts in the pipeline sequentially
82
+ def process_all_prompts
83
+ prompt_count = 0
84
+ total_prompts = AIA.config.pipeline.size
85
+
86
+ until AIA.config.pipeline.empty?
87
+ prompt_count += 1
88
+ prompt_id = AIA.config.pipeline.shift
89
+
90
+ puts "\n--- Processing prompt #{prompt_count}/#{total_prompts}: #{prompt_id} ---" if AIA.verbose? && total_prompts > 1
91
+
92
+ process_single_prompt(prompt_id)
67
93
  end
94
+ end
95
+
96
+ # Process a single prompt with all its requirements
97
+ def process_single_prompt(prompt_id)
98
+ role_id = AIA.config.role
68
99
 
100
+ # Skip empty prompt IDs
101
+ return if prompt_id.nil? || prompt_id.empty?
69
102
 
70
- # --- Get and process the initial prompt ---
103
+ # Get and process the prompt
71
104
  begin
72
105
  prompt = @prompt_handler.get_prompt(prompt_id, role_id)
73
106
  rescue StandardError => e
74
- puts "Error: #{e.message}"
107
+ puts "Error processing prompt '#{prompt_id}': #{e.message}"
75
108
  return
76
109
  end
77
110
 
78
- # Collect variable values if needed
111
+ # Collect variable values (only when flag is true)
112
+ if @include_context_flag
113
+ collect_variable_values(prompt)
114
+ enhance_prompt_with_extras(prompt)
115
+ end
116
+
117
+ # Get final prompt text
118
+ prompt_text = prompt.to_s
119
+
120
+ # Add context files (only when flag is true)
121
+ if @include_context_flag
122
+ prompt_text = add_context_files(prompt_text)
123
+ # SMELL: TODO? empty the AIA.config.context_files array
124
+ @include_context_flag = false
125
+ end
126
+
127
+ # Send prompt to AI and get response
128
+ send_prompt_and_get_response(prompt_text)
129
+ end
130
+
131
+ # Collect variable values from user input
132
+ def collect_variable_values(prompt)
79
133
  variables = prompt.parameters.keys
134
+ return if variables.nil? || variables.empty?
80
135
 
81
- if variables && !variables.empty?
82
- variable_values = {}
83
- history_manager = AIA::HistoryManager.new prompt: prompt
84
-
85
- variables.each do |var_name|
86
- # History is based on the prompt ID and the variable name (without brackets)
87
- history = prompt.parameters[var_name]
88
-
89
- # Ask the user for the variable
90
- value = history_manager.request_variable_value(
91
- variable_name: var_name,
92
- history_values: history
93
- )
94
- # Store the value using the original BRACKETED key from prompt.parameters
95
- if history.include? value
96
- history.delete(value)
97
- end
98
- history << value
99
- if history.size > HistoryManager::MAX_VARIABLE_HISTORY
100
- history.shift
101
- end
102
- variable_values[var_name] = history
103
- end
136
+ variable_values = {}
137
+ history_manager = AIA::HistoryManager.new prompt: prompt
138
+
139
+ variables.each do |var_name|
140
+ history = prompt.parameters[var_name]
104
141
 
105
- # Assign collected values back for prompt_manager substitution
106
- prompt.parameters = variable_values
142
+ value = history_manager.request_variable_value(
143
+ variable_name: var_name,
144
+ history_values: history,
145
+ )
146
+
147
+ # Update history
148
+ history.delete(value) if history.include?(value)
149
+ history << value
150
+ history.shift if history.size > HistoryManager::MAX_VARIABLE_HISTORY
151
+
152
+ variable_values[var_name] = history
107
153
  end
108
154
 
155
+ prompt.parameters = variable_values
156
+ end
157
+
158
+ # Add terse instructions, stdin content, and executable prompt file content
159
+ def enhance_prompt_with_extras(prompt)
109
160
  # Add terse instruction if needed
110
- if AIA.terse?
111
- prompt.text << TERSE_PROMPT
112
- end
161
+ prompt.text << TERSE_PROMPT if AIA.terse?
113
162
 
114
- prompt.save
115
- # Substitute variables and get final prompt text
116
- prompt_text = prompt.to_s
163
+ # Add STDIN content
164
+ if AIA.config.stdin_content && !AIA.config.stdin_content.strip.empty?
165
+ prompt.text << "\n\n" << AIA.config.stdin_content
166
+ end
117
167
 
118
- # Add context files if any
119
- if AIA.config.context_files && !AIA.config.context_files.empty?
120
- context = AIA.config.context_files.map do |file|
121
- File.read(file) rescue "Error reading file: #{file}"
122
- end.join("\n\n")
123
- prompt_text = "#{prompt_text}\n\nContext:\n#{context}"
168
+ # Add executable prompt file content
169
+ if AIA.config.executable_prompt_file
170
+ prompt.text << "\n\n" << File.read(AIA.config.executable_prompt_file)
171
+ .lines[1..]
172
+ .join
124
173
  end
174
+ end
125
175
 
126
- # Add initial user prompt to context *before* sending to AI
127
- @context_manager.add_to_context(role: 'user', content: prompt_text)
176
+ # Add context files to prompt text
177
+ def add_context_files(prompt_text)
178
+ return prompt_text unless AIA.config.context_files && !AIA.config.context_files.empty?
179
+
180
+ context = AIA.config.context_files.map do |file|
181
+ File.read(file) rescue "Error reading file: #{file}"
182
+ end.join("\n\n")
183
+
184
+ "#{prompt_text}\n\nContext:\n#{context}"
185
+ end
128
186
 
129
- # Process the initial prompt
187
+ # Send prompt to AI and handle the response
188
+ def send_prompt_and_get_response(prompt_text)
189
+ # Add prompt to conversation context
190
+ @context_manager.add_to_context(role: "user", content: prompt_text)
191
+
192
+ # Process the prompt
130
193
  @ui_presenter.display_thinking_animation
131
- # Send the current context (which includes the user prompt)
132
194
  response = @chat_processor.process_prompt(@context_manager.get_context)
133
195
 
134
196
  # Add AI response to context
135
- @context_manager.add_to_context(role: 'assistant', content: response)
197
+ @context_manager.add_to_context(role: "assistant", content: response)
136
198
 
137
199
  # Output the response
138
- @chat_processor.output_response(response) # Handles display
139
-
140
- # Process next prompts/pipeline (if any)
141
- @chat_processor.process_next_prompts(response, @prompt_handler)
200
+ @chat_processor.output_response(response)
142
201
 
143
- # --- Enter chat mode AFTER processing initial prompt ---
144
- if AIA.chat?
145
- @ui_presenter.display_separator # Add separator
146
- start_chat(skip_context_files: true) # start_chat will use the now populated context
202
+ # Process any directives in the response
203
+ if @directive_processor.directive?(response)
204
+ directive_result = @directive_processor.process(response, @context_manager)
205
+ puts "\nDirective output: #{directive_result}" if directive_result && !directive_result.strip.empty?
147
206
  end
148
207
  end
149
208
 
@@ -156,14 +215,14 @@ module AIA
156
215
 
157
216
  # Generate chat prompt ID
158
217
  now = Time.now
159
- @chat_prompt_id = "chat_#{now.strftime('%Y%m%d_%H%M%S')}"
218
+ @chat_prompt_id = "chat_#{now.strftime("%Y%m%d_%H%M%S")}"
160
219
 
161
220
  # Create the temporary prompt
162
221
  begin
163
222
  # Create the unique? prompt ID in the file storage system with its initial text
164
223
  PromptManager::Prompt.create(
165
224
  id: @chat_prompt_id,
166
- text: "Today's date is #{now.strftime('%Y-%m-%d')} and the current time is #{now.strftime('%H:%M:%S')}"
225
+ text: "Today's date is #{now.strftime("%Y-%m-%d")} and the current time is #{now.strftime("%H:%M:%S")}",
167
226
  )
168
227
 
169
228
  # Capture self for the handlers
@@ -171,7 +230,7 @@ module AIA
171
230
 
172
231
  # Set up cleanup handlers only after prompt is created
173
232
  at_exit { session_instance.send(:cleanup_chat_prompt) }
174
- Signal.trap('INT') {
233
+ Signal.trap("INT") {
175
234
  session_instance.send(:cleanup_chat_prompt)
176
235
  exit
177
236
  }
@@ -181,9 +240,9 @@ module AIA
181
240
  @chat_prompt = PromptManager::Prompt.new(
182
241
  id: @chat_prompt_id,
183
242
  directives_processor: @directive_processor,
184
- erb_flag: true,
185
- envar_flag: true,
186
- external_binding: binding,
243
+ erb_flag: true,
244
+ envar_flag: true,
245
+ external_binding: binding,
187
246
  )
188
247
 
189
248
  Reline::HISTORY.clear
@@ -196,14 +255,14 @@ module AIA
196
255
 
197
256
  if !context.empty?
198
257
  # Add context files content to context
199
- @context_manager.add_to_context(role: 'user', content: context)
258
+ @context_manager.add_to_context(role: "user", content: context)
200
259
 
201
260
  # Process the context
202
261
  @ui_presenter.display_thinking_animation
203
262
  response = @chat_processor.process_prompt(@context_manager.get_context)
204
263
 
205
264
  # Add AI response to context
206
- @context_manager.add_to_context(role: 'assistant', content: response)
265
+ @context_manager.add_to_context(role: "assistant", content: response)
207
266
 
208
267
  # Output the response
209
268
  @chat_processor.output_response(response)
@@ -216,18 +275,18 @@ module AIA
216
275
  if !STDIN.tty?
217
276
  original_stdin = STDIN.dup
218
277
  piped_input = STDIN.read.strip
219
- STDIN.reopen('/dev/tty')
278
+ STDIN.reopen("/dev/tty")
220
279
 
221
280
  if !piped_input.empty?
222
281
  @chat_prompt.text = piped_input
223
282
  processed_input = @chat_prompt.to_s
224
283
 
225
- @context_manager.add_to_context(role: 'user', content: processed_input)
284
+ @context_manager.add_to_context(role: "user", content: processed_input)
226
285
 
227
286
  @ui_presenter.display_thinking_animation
228
287
  response = @chat_processor.process_prompt(@context_manager.get_context)
229
288
 
230
- @context_manager.add_to_context(role: 'assistant', content: response)
289
+ @context_manager.add_to_context(role: "assistant", content: response)
231
290
  @chat_processor.output_response(response)
232
291
  @chat_processor.speak(response) if AIA.speak?
233
292
  @ui_presenter.display_separator
@@ -240,10 +299,10 @@ module AIA
240
299
  loop do
241
300
  follow_up_prompt = @ui_presenter.ask_question
242
301
 
243
- break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == 'exit' || follow_up_prompt.strip.empty?
302
+ break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == "exit" || follow_up_prompt.strip.empty?
244
303
 
245
304
  if AIA.config.out_file
246
- File.open(AIA.config.out_file, 'a') do |file|
305
+ File.open(AIA.config.out_file, "a") do |file|
247
306
  file.puts "\nYou: #{follow_up_prompt}"
248
307
  end
249
308
  end
@@ -251,28 +310,28 @@ module AIA
251
310
  if @directive_processor.directive?(follow_up_prompt)
252
311
  directive_output = @directive_processor.process(follow_up_prompt, @context_manager)
253
312
 
254
- if follow_up_prompt.strip.start_with?('//clear')
255
- # The directive processor has called context_manager.clear_context
256
- # but we need a more aggressive approach to fully clear all context
313
+ if follow_up_prompt.strip.start_with?("//clear")
314
+ # The directive processor has called context_manager.clear_context
315
+ # but we need a more aggressive approach to fully clear all context
257
316
 
258
- # First, clear the context manager's context
259
- @context_manager.clear_context(keep_system_prompt: true)
317
+ # First, clear the context manager's context
318
+ @context_manager.clear_context(keep_system_prompt: true)
260
319
 
261
- # Second, try clearing the client's context
262
- if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
263
- AIA.config.client.clear_context
264
- end
320
+ # Second, try clearing the client's context
321
+ if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
322
+ AIA.config.client.clear_context
323
+ end
265
324
 
266
- # Third, completely reinitialize the client to ensure fresh state
267
- # This is the most aggressive approach to ensure no context remains
268
- begin
269
- AIA.config.client = AIA::RubyLLMAdapter.new
270
- rescue => e
271
- STDERR.puts "Error reinitializing client: #{e.message}"
272
- end
325
+ # Third, completely reinitialize the client to ensure fresh state
326
+ # This is the most aggressive approach to ensure no context remains
327
+ begin
328
+ AIA.config.client = AIA::RubyLLMAdapter.new
329
+ rescue => e
330
+ STDERR.puts "Error reinitializing client: #{e.message}"
331
+ end
273
332
 
274
- @ui_presenter.display_info("Chat context cleared.")
275
- next
333
+ @ui_presenter.display_info("Chat context cleared.")
334
+ next
276
335
  elsif directive_output.nil? || directive_output.strip.empty?
277
336
  next
278
337
  else
@@ -284,19 +343,18 @@ module AIA
284
343
  @chat_prompt.text = follow_up_prompt
285
344
  processed_prompt = @chat_prompt.to_s
286
345
 
287
- @context_manager.add_to_context(role: 'user', content: processed_prompt)
346
+ @context_manager.add_to_context(role: "user", content: processed_prompt)
288
347
  conversation = @context_manager.get_context
289
348
 
290
349
  @ui_presenter.display_thinking_animation
291
350
  response = @chat_processor.process_prompt(conversation)
292
351
 
293
352
  @ui_presenter.display_ai_response(response)
294
- @context_manager.add_to_context(role: 'assistant', content: response)
353
+ @context_manager.add_to_context(role: "assistant", content: response)
295
354
  @chat_processor.speak(response)
296
355
 
297
356
  @ui_presenter.display_separator
298
357
  end
299
-
300
358
  ensure
301
359
  @ui_presenter.display_chat_end
302
360
  end
@@ -26,7 +26,7 @@ module AIA
26
26
  puts "\nAI: "
27
27
  format_chat_response(response)
28
28
 
29
- if AIA.config.out_file
29
+ if AIA.config.out_file && !AIA.config.out_file.nil?
30
30
  File.open(AIA.config.out_file, 'a') do |file|
31
31
  file.puts "\nAI: "
32
32
  format_chat_response(response, file)
data/lib/aia.rb CHANGED
@@ -6,8 +6,6 @@
6
6
 
7
7
  require 'ruby_llm'
8
8
  require 'ruby_llm/mcp'
9
- require 'shared_tools'
10
- require 'shared_tools/ruby_llm'
11
9
  require 'prompt_manager'
12
10
 
13
11
 
@@ -41,6 +39,10 @@ require_relative 'aia/session'
41
39
  module AIA
42
40
  at_exit do
43
41
  STDERR.puts "Exiting AIA application..."
42
+ # Clean up temporary STDIN file if it exists
43
+ if @config&.stdin_temp_file && File.exist?(@config.stdin_temp_file)
44
+ File.unlink(@config.stdin_temp_file)
45
+ end
44
46
  end
45
47
 
46
48
  @config = nil
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: aia
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.9.6
4
+ version: 0.9.8
5
5
  platform: ruby
6
6
  authors:
7
7
  - Dewayne VanHoozer
@@ -10,7 +10,7 @@ cert_chain: []
10
10
  date: 1980-01-02 00:00:00.000000000 Z
11
11
  dependencies:
12
12
  - !ruby/object:Gem::Dependency
13
- name: amazing_print
13
+ name: activesupport
14
14
  requirement: !ruby/object:Gem::Requirement
15
15
  requirements:
16
16
  - - ">="
@@ -24,7 +24,7 @@ dependencies:
24
24
  - !ruby/object:Gem::Version
25
25
  version: '0'
26
26
  - !ruby/object:Gem::Dependency
27
- name: faraday
27
+ name: amazing_print
28
28
  requirement: !ruby/object:Gem::Requirement
29
29
  requirements:
30
30
  - - ">="
@@ -38,49 +38,49 @@ dependencies:
38
38
  - !ruby/object:Gem::Version
39
39
  version: '0'
40
40
  - !ruby/object:Gem::Dependency
41
- name: prompt_manager
41
+ name: faraday
42
42
  requirement: !ruby/object:Gem::Requirement
43
43
  requirements:
44
44
  - - ">="
45
45
  - !ruby/object:Gem::Version
46
- version: 0.5.6
46
+ version: '0'
47
47
  type: :runtime
48
48
  prerelease: false
49
49
  version_requirements: !ruby/object:Gem::Requirement
50
50
  requirements:
51
51
  - - ">="
52
52
  - !ruby/object:Gem::Version
53
- version: 0.5.6
53
+ version: '0'
54
54
  - !ruby/object:Gem::Dependency
55
- name: ruby_llm
55
+ name: prompt_manager
56
56
  requirement: !ruby/object:Gem::Requirement
57
57
  requirements:
58
58
  - - ">="
59
59
  - !ruby/object:Gem::Version
60
- version: 1.3.1
60
+ version: 0.5.7
61
61
  type: :runtime
62
62
  prerelease: false
63
63
  version_requirements: !ruby/object:Gem::Requirement
64
64
  requirements:
65
65
  - - ">="
66
66
  - !ruby/object:Gem::Version
67
- version: 1.3.1
67
+ version: 0.5.7
68
68
  - !ruby/object:Gem::Dependency
69
- name: ruby_llm-mcp
69
+ name: ruby_llm
70
70
  requirement: !ruby/object:Gem::Requirement
71
71
  requirements:
72
72
  - - ">="
73
73
  - !ruby/object:Gem::Version
74
- version: '0'
74
+ version: 1.3.1
75
75
  type: :runtime
76
76
  prerelease: false
77
77
  version_requirements: !ruby/object:Gem::Requirement
78
78
  requirements:
79
79
  - - ">="
80
80
  - !ruby/object:Gem::Version
81
- version: '0'
81
+ version: 1.3.1
82
82
  - !ruby/object:Gem::Dependency
83
- name: reline
83
+ name: ruby_llm-mcp
84
84
  requirement: !ruby/object:Gem::Requirement
85
85
  requirements:
86
86
  - - ">="
@@ -94,7 +94,7 @@ dependencies:
94
94
  - !ruby/object:Gem::Version
95
95
  version: '0'
96
96
  - !ruby/object:Gem::Dependency
97
- name: shared_tools
97
+ name: reline
98
98
  requirement: !ruby/object:Gem::Requirement
99
99
  requirements:
100
100
  - - ">="
@@ -318,6 +318,7 @@ files:
318
318
  - examples/headlines
319
319
  - examples/tools/edit_file.rb
320
320
  - examples/tools/list_files.rb
321
+ - examples/tools/pdf_page_reader.rb
321
322
  - examples/tools/read_file.rb
322
323
  - examples/tools/run_shell_command.rb
323
324
  - images/aia.png