aia 0.9.7 → 0.9.8

This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 21134c8c9fa3664309baf0c3d1bce2857dffd3dd1ad8c554c7cb15ac48b57634
- data.tar.gz: 46f64bdbefe73eede9add530bad3a4957f6b7033706a1d1daf5aa6bef1fb6084
+ metadata.gz: 3411bc073c7fadee9f625917b13b06fb11aee61c234f61c57958be1000f261c6
+ data.tar.gz: 1228b68648536d5c8a4caeb2e6cf6eb91400647c96163b1ab7b6f499b1001d99
  SHA512:
- metadata.gz: dc1273757619b1962986e1dfac94b8d10b2c493ff0cf0d01d0938c65a902700d96484d42e661770328a4f6f904a1265f6a136964fb8c31218ed22135fed806dc
- data.tar.gz: edc7124424b74f7d167a78635abd7302eb05cd4208058947dd8700e5e05c1200eba47346d1f0beea0ed9ea37a5875ca7cb585d40752dbcf88c64063dc278e398
+ metadata.gz: cbd94570f820e5007a7a9ba94b908a0f36407ec9b9f11074283e67374b24261db18309f32c2e079521c93322ce6fe4c2223181d61f3dce99c4a0d39bc5a26e48
+ data.tar.gz: fdd5647498501805e598c5e7d319193928954e1e2e639e473440c323e49abee46c09aa665130679e790db15c49d81f0ab5bf09deb5ac0f4b6b67eba6a9da625c
@@ -1,5 +1,6 @@
  label: "## Table of Contents"
  patterns:
  - "README.md"
- - doc/*.md
+ - docs/*.md
+ - docs/draft_articles/*.md
  root_dir: "."
data/.version CHANGED
@@ -1 +1 @@
- 0.9.7
+ 0.9.8
data/CHANGELOG.md CHANGED
@@ -1,10 +1,13 @@
  # Changelog
  ## [Unreleased]

- ### [0.9.8] WIP
-
  ## Released

+ ### [0.9.8] 2025-06-25
+ - fixing an issue with pipelined prompts
+ - now showing the complete modality of the model on the processing line.
+ - changed -p option from prompts_dir to pipeline
+
  ### [0.9.7] 2025-06-20

  - **NEW FEATURE**: Added `--available_models` CLI option to list all available AI models
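
Two of these entries change observable behavior. Pipelined prompts are now consumed as a queue (see the session.rb changes below), and the `-p` change is breaking for scripts: `-p` used to be the short form of `--prompts_dir` and now feeds `--pipeline`. A minimal sketch of the new queue behavior, using hypothetical prompt IDs:

    # Stand-in for AIA.config.pipeline after `-p first,second,third`
    pipeline = ["first", "second", "third"]
    until pipeline.empty?
      prompt_id = pipeline.shift  # each prompt is processed once, in order
      puts "processing #{prompt_id}"
    end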
data/examples/tools/pdf_page_reader.rb ADDED
@@ -0,0 +1,30 @@
+ # examples/tools/pdf_page_reader.rb
+ # See: https://max.engineer/giant-pdf-llm
+
+ require "ruby_llm/tool"
+ require 'pdf-reader'
+
+
+ class PdfPageReader < RubyLLM::Tool
+   # TODO: make the path to the pdf document a parameter
+   DOC = PDF::Reader.new('docs/big-doc.pdf')
+
+   description 'Read the text of any set of pages from a PDF document.'
+   param :page_numbers,
+         desc: 'Comma-separated page numbers (first page: 1). (e.g. "12, 14, 15")'
+
+   def execute(page_numbers:)
+     puts "\n-- Reading pages: #{page_numbers}\n\n"
+     page_numbers = page_numbers.split(',').map { _1.strip.to_i }
+     pages = page_numbers.map { [_1, DOC.pages[_1.to_i - 1]] }
+     {
+       pages: pages.map { |num, p|
+         # There are lines drawn with dots in my doc.
+         # So I squeeze them to save tokens.
+         { page: num, text: p&.text&.squeeze('.') }
+       }
+     }
+   rescue => e
+     { error: e.message }
+   end
+ end
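
One way to smoke-test the new example tool outside of aia is to call `execute` directly. A hedged sketch, assuming the `pdf-reader` and `ruby_llm` gems are installed, a PDF actually exists at `docs/big-doc.pdf`, and that `PdfPageReader.new` needs no arguments:

    require_relative "examples/tools/pdf_page_reader"

    result = PdfPageReader.new.execute(page_numbers: "12, 14, 15")
    # => { pages: [{ page: 12, text: "..." }, ...] } on success, { error: "..." } otherwise
    puts result[:pages]&.map { |p| p[:text] }&.join("\n")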
data/lib/aia/chat_processor_service.rb CHANGED
@@ -60,9 +60,8 @@ module AIA
      def output_response(response)
        speak(response)

-       # Only output to STDOUT if we're in chat mode
-
-       if AIA.chat? || AIA.config.out_file.nil? || 'STDOUT' == AIA.config.out_file.upcase
+       # Output to STDOUT or file based on out_file configuration
+       if AIA.config.out_file.nil? || 'STDOUT' == AIA.config.out_file.upcase
          print "\nAI:\n "
          puts response
        else
@@ -94,26 +93,7 @@ module AIA

      def determine_operation_type
        mode = AIA.config.client.model.modalities
-
-       if mode.text_to_image?
-         :text_to_image
-       elsif mode.image_to_text?
-         :image_to_text
-       elsif mode.audio_to_text?
-         :audio_to_text
-       elsif mode.text_to_audio?
-         :text_to_audio
-       elsif mode.audio_to_audio?
-         :audio_to_audio
-       elsif mode.image_to_image?
-         :image_to_image
-       elsif mode.audio_to_image?
-         :audio_to_image
-       elsif mode.image_to_audio?
-         :image_to_audio
-       else
-         :text_to_text
-       end
+       mode.input.join(',') + " TO " + mode.output.join(',')
      end
    end
  end
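
The rewritten `determine_operation_type` no longer reduces the model's modalities to one of a fixed set of symbols; it reports everything the model declares, which is what the changelog means by "showing the complete modality of the model on the processing line". A standalone sketch with a stand-in for the modalities object (the input/output values are hypothetical):

    require "ostruct"

    mode = OpenStruct.new(input: %w[text image], output: %w[text])
    puts mode.input.join(',') + " TO " + mode.output.join(',')
    # => text,image TO text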
data/lib/aia/config.rb CHANGED
@@ -54,7 +54,6 @@ module AIA
        append: false, # Default to not append to existing out_file

        # workflow
-       next: nil,
        pipeline: [],

        # PromptManager::Prompt Tailoring
@@ -125,10 +124,11 @@ module AIA
      config.remaining_args = nil

      # Check for STDIN content
-     stdin_content = nil
+     stdin_content = ''
+
      if !STDIN.tty? && !STDIN.closed?
        begin
-         stdin_content = STDIN.read
+         stdin_content << "\n" + STDIN.read
          STDIN.reopen('/dev/tty') # Reopen STDIN for interactive use
        rescue => _
          # If we can't reopen, continue without error
@@ -190,6 +190,7 @@ module AIA
      if config.prompt_id.nil? || config.prompt_id.empty?
        if !config.role.nil? || !config.role.empty?
          config.prompt_id = config.role
+         config.pipeline.prepend config.prompt_id
          config.role = ''
        end
      end
@@ -242,6 +243,25 @@
        PromptManager::Prompt.parameter_regex = Regexp.new(config.parameter_regex)
      end

+     if !config.prompt_id.empty? && config.prompt_id != config.pipeline.first
+       config.pipeline.prepend config.prompt_id
+     end
+
+     unless config.pipeline.empty?
+       config.pipeline.each do |prompt_id|
+         # Skip empty prompt IDs (can happen in chat-only mode)
+         next if prompt_id.nil? || prompt_id.empty?
+
+         prompt_file_path = File.join(config.prompts_dir, "#{prompt_id}.txt")
+         unless File.exist?(prompt_file_path)
+           STDERR.puts "Error: Prompt ID '#{prompt_id}' does not exist at #{prompt_file_path}"
+           and_exit = true
+         end
+       end
+     end
+
+     exit(1) if and_exit
+
      config
    end

@@ -337,7 +357,7 @@
          " aia --chat [PROMPT_ID] [CONTEXT_FILE]*\n" +
          " aia --chat [CONTEXT_FILE]*"

-     opts.on("--chat", "Begin a chat session with the LLM after the initial prompt response; will set --no-out_file so that the LLM response comes to STDOUT.") do
+     opts.on("--chat", "Begin a chat session with the LLM after processing all prompts in the pipeline.") do
        config.chat = true
        puts "Debug: Setting chat mode to true" if config.debug
      end
@@ -448,7 +468,7 @@
        end
      end

-     opts.on("-p", "--prompts_dir DIR", "Directory containing prompt files") do |dir|
+     opts.on("--prompts_dir DIR", "Directory containing prompt files") do |dir|
        config.prompts_dir = dir
      end

@@ -491,11 +511,13 @@
      end

      opts.on("-n", "--next PROMPT_ID", "Next prompt to process") do |next_prompt|
-       config.next = next_prompt
+       config.pipeline ||= []
+       config.pipeline << next_prompt
      end

-     opts.on("--pipeline PROMPTS", "Pipeline of prompts to process") do |pipeline|
-       config.pipeline = pipeline.split(',')
+     opts.on("-p PROMPTS", "--pipeline PROMPTS", "Pipeline of comma-seperated prompt IDs to process") do |pipeline|
+       config.pipeline ||= []
+       config.pipeline += pipeline.split(',').map(&:strip)
      end

      opts.on("-f", "--fuzzy", "Use fuzzy matching for prompt search") do
data/lib/aia/directive_processor.rb CHANGED
@@ -185,8 +185,10 @@ module AIA
      def pipeline(args = [], context_manager=nil)
        if args.empty?
          ap AIA.config.pipeline
+       elsif 1 == args.size
+         AIA.config.pipeline += args.first.split(',').map(&:strip).reject{|id| id.empty?}
        else
-         AIA.config.pipeline += args.map {|id| id.gsub(',', '').strip}
+         AIA.config.pipeline += args.map{|id| id.gsub(',', '').strip}.reject{|id| id.empty?}
        end
        ''
      end
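
The `//pipeline` directive now accepts either a single comma-separated argument or several space-separated ones, dropping empty IDs in both cases. A standalone sketch mirroring the parsing above (prompt IDs are hypothetical):

    def parse_pipeline_args(args)
      if 1 == args.size
        args.first.split(',').map(&:strip).reject { |id| id.empty? }
      else
        args.map { |id| id.gsub(',', '').strip }.reject { |id| id.empty? }
      end
    end

    p parse_pipeline_args(["one,two, three"])  # => ["one", "two", "three"]
    p parse_pipeline_args(["one,", "two"])     # => ["one", "two"]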
data/lib/aia/session.rb CHANGED
@@ -1,158 +1,208 @@
  # lib/aia/session.rb

- require 'tty-spinner'
- require 'tty-screen'
- require 'reline'
- require 'prompt_manager'
- require 'json'
- require 'fileutils'
- require 'amazing_print'
- require_relative 'directive_processor'
- require_relative 'history_manager'
- require_relative 'context_manager'
- require_relative 'ui_presenter'
- require_relative 'chat_processor_service'
- require_relative 'prompt_handler'
- require_relative 'utility'
+ require "tty-spinner"
+ require "tty-screen"
+ require "reline"
+ require "prompt_manager"
+ require "json"
+ require "fileutils"
+ require "amazing_print"
+ require_relative "directive_processor"
+ require_relative "history_manager"
+ require_relative "context_manager"
+ require_relative "ui_presenter"
+ require_relative "chat_processor_service"
+ require_relative "prompt_handler"
+ require_relative "utility"

  module AIA
    class Session
      KW_HISTORY_MAX = 5 # Maximum number of history entries per keyword
-     TERSE_PROMPT = "\nKeep your response short and to the point.\n"
+     TERSE_PROMPT = "\nKeep your response short and to the point.\n"

      def initialize(prompt_handler)
-       @prompt_handler = prompt_handler
+       @prompt_handler = prompt_handler
        @chat_prompt_id = nil # Initialize to nil
+       @include_context_flag = true # Flag to include context files, stdin, etc. only once

        # Special handling for chat mode with context files but no prompt ID
        if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
-         prompt_instance = nil
+         prompt_instance = nil
          @history_manager = nil
        elsif AIA.chat? && AIA.config.prompt_id.empty?
-         prompt_instance = nil
+         prompt_instance = nil
          @history_manager = nil
        else
-         prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
+         prompt_instance = @prompt_handler.get_prompt(AIA.config.prompt_id)
          @history_manager = HistoryManager.new(prompt: prompt_instance)
        end

-       @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
-       @ui_presenter = UIPresenter.new
+       @context_manager = ContextManager.new(system_prompt: AIA.config.system_prompt)
+       @ui_presenter = UIPresenter.new
        @directive_processor = DirectiveProcessor.new
-       @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)
+       @chat_processor = ChatProcessorService.new(@ui_presenter, @directive_processor)

        if AIA.config.out_file && !AIA.config.out_file.nil? && !AIA.append? && File.exist?(AIA.config.out_file)
-         File.open(AIA.config.out_file, 'w') {} # Truncate the file
+         File.open(AIA.config.out_file, "w") { } # Truncate the file
        end
      end

-     # Starts the session, processing the initial prompt and handling user
-     # interactions. It manages the flow of prompts, context, and responses.
+     # Starts the session, processing all prompts in the pipeline and then
+     # optionally starting an interactive chat session.
      def start
-       prompt_id = AIA.config.prompt_id
-       role_id = AIA.config.role
+       # Handle special chat-only cases first
+       if should_start_chat_immediately?
+         AIA::Utility.robot
+         start_chat
+         return
+       end

-       # Handle chat mode
+       # Process all prompts in the pipeline
+       process_all_prompts
+
+       # Start chat mode after all prompts are processed
        if AIA.chat?
          AIA::Utility.robot
-         # If we're in chat mode with only context files, go straight to chat
-         if prompt_id.empty? && role_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
-           start_chat
-           return
-         elsif prompt_id.empty? && role_id.empty?
-           # Even with an empty prompt_id, we might have context files
-           start_chat
-           return
-         end
+         @ui_presenter.display_separator
+         start_chat(skip_context_files: true)
        end
+     end
+
+     private
+
+     # Check if we should start chat immediately without processing any prompts
+     def should_start_chat_immediately?
+       return false unless AIA.chat?
+
+       # If pipeline is empty or only contains empty prompt_ids, go straight to chat
+       AIA.config.pipeline.empty? || AIA.config.pipeline.all? { |id| id.nil? || id.empty? }
+     end
+
+     # Process all prompts in the pipeline sequentially
+     def process_all_prompts
+       prompt_count = 0
+       total_prompts = AIA.config.pipeline.size

+       until AIA.config.pipeline.empty?
+         prompt_count += 1
+         prompt_id = AIA.config.pipeline.shift

-       # --- Get and process the initial prompt ---
+         puts "\n--- Processing prompt #{prompt_count}/#{total_prompts}: #{prompt_id} ---" if AIA.verbose? && total_prompts > 1
+
+         process_single_prompt(prompt_id)
+       end
+     end
+
+     # Process a single prompt with all its requirements
+     def process_single_prompt(prompt_id)
+       role_id = AIA.config.role
+
+       # Skip empty prompt IDs
+       return if prompt_id.nil? || prompt_id.empty?
+
+       # Get and process the prompt
        begin
          prompt = @prompt_handler.get_prompt(prompt_id, role_id)
        rescue StandardError => e
-         puts "Error: #{e.message}"
+         puts "Error processing prompt '#{prompt_id}': #{e.message}"
          return
        end

-       # Collect variable values if needed
+       # Collect variable values (only when flag is true)
+       if @include_context_flag
+         collect_variable_values(prompt)
+         enhance_prompt_with_extras(prompt)
+       end
+
+       # Get final prompt text
+       prompt_text = prompt.to_s
+
+       # Add context files (only when flag is true)
+       if @include_context_flag
+         prompt_text = add_context_files(prompt_text)
+         # SMELL: TODO? empty the AIA.config.context_files array
+         @include_context_flag = false
+       end
+
+       # Send prompt to AI and get response
+       send_prompt_and_get_response(prompt_text)
+     end
+
+     # Collect variable values from user input
+     def collect_variable_values(prompt)
        variables = prompt.parameters.keys
+       return if variables.nil? || variables.empty?

-       if variables && !variables.empty?
-         variable_values = {}
-         history_manager = AIA::HistoryManager.new prompt: prompt
-
-         variables.each do |var_name|
-           # History is based on the prompt ID and the variable name (without brackets)
-           history = prompt.parameters[var_name]
-
-           # Ask the user for the variable
-           value = history_manager.request_variable_value(
-             variable_name: var_name,
-             history_values: history
-           )
-           # Store the value using the original BRACKETED key from prompt.parameters
-           if history.include? value
-             history.delete(value)
-           end
-           history << value
-           if history.size > HistoryManager::MAX_VARIABLE_HISTORY
-             history.shift
-           end
-           variable_values[var_name] = history
-         end
+       variable_values = {}
+       history_manager = AIA::HistoryManager.new prompt: prompt

-         # Assign collected values back for prompt_manager substitution
-         prompt.parameters = variable_values
+       variables.each do |var_name|
+         history = prompt.parameters[var_name]
+
+         value = history_manager.request_variable_value(
+           variable_name: var_name,
+           history_values: history,
+         )
+
+         # Update history
+         history.delete(value) if history.include?(value)
+         history << value
+         history.shift if history.size > HistoryManager::MAX_VARIABLE_HISTORY
+
+         variable_values[var_name] = history
        end

+       prompt.parameters = variable_values
+     end
+
+     # Add terse instructions, stdin content, and executable prompt file content
+     def enhance_prompt_with_extras(prompt)
        # Add terse instruction if needed
-       if AIA.terse?
-         prompt.text << TERSE_PROMPT
-       end
+       prompt.text << TERSE_PROMPT if AIA.terse?

+       # Add STDIN content
        if AIA.config.stdin_content && !AIA.config.stdin_content.strip.empty?
          prompt.text << "\n\n" << AIA.config.stdin_content
        end

+       # Add executable prompt file content
        if AIA.config.executable_prompt_file
          prompt.text << "\n\n" << File.read(AIA.config.executable_prompt_file)
-           .lines[1..]
-           .join
+                                      .lines[1..]
+                                      .join
        end
+     end

-       # Substitute variables, execute dynamic content and get final prompt text
-       prompt_text = prompt.to_s
+     # Add context files to prompt text
+     def add_context_files(prompt_text)
+       return prompt_text unless AIA.config.context_files && !AIA.config.context_files.empty?

-       # Add context files if any
-       if AIA.config.context_files && !AIA.config.context_files.empty?
-         context = AIA.config.context_files.map do |file|
-           File.read(file) rescue "Error reading file: #{file}"
-         end.join("\n\n")
-         prompt_text = "#{prompt_text}\n\nContext:\n#{context}"
-       end
+       context = AIA.config.context_files.map do |file|
+         File.read(file) rescue "Error reading file: #{file}"
+       end.join("\n\n")

-       # Add initial user prompt to context *before* sending to AI
-       @context_manager.add_to_context(role: 'user', content: prompt_text)
+       "#{prompt_text}\n\nContext:\n#{context}"
+     end

-       # Process the initial prompt
+     # Send prompt to AI and handle the response
+     def send_prompt_and_get_response(prompt_text)
+       # Add prompt to conversation context
+       @context_manager.add_to_context(role: "user", content: prompt_text)
+
+       # Process the prompt
        @ui_presenter.display_thinking_animation
-       # Send the current context (which includes the user prompt)
        response = @chat_processor.process_prompt(@context_manager.get_context)

        # Add AI response to context
-       @context_manager.add_to_context(role: 'assistant', content: response)
+       @context_manager.add_to_context(role: "assistant", content: response)

        # Output the response
-       @chat_processor.output_response(response) # Handles display
-
-       # Process next prompts/pipeline (if any)
-       @chat_processor.process_next_prompts(response, @prompt_handler)
+       @chat_processor.output_response(response)

-       # --- Enter chat mode AFTER processing initial prompt ---
-       if AIA.chat?
-         @ui_presenter.display_separator # Add separator
-         start_chat(skip_context_files: true) # start_chat will use the now populated context
+       # Process any directives in the response
+       if @directive_processor.directive?(response)
+         directive_result = @directive_processor.process(response, @context_manager)
+         puts "\nDirective output: #{directive_result}" if directive_result && !directive_result.strip.empty?
        end
      end

@@ -165,14 +215,14 @@ module AIA

        # Generate chat prompt ID
        now = Time.now
-       @chat_prompt_id = "chat_#{now.strftime('%Y%m%d_%H%M%S')}"
+       @chat_prompt_id = "chat_#{now.strftime("%Y%m%d_%H%M%S")}"

        # Create the temporary prompt
        begin
          # Create the unique? prompt ID in the file storage system with its initial text
          PromptManager::Prompt.create(
            id: @chat_prompt_id,
-           text: "Today's date is #{now.strftime('%Y-%m-%d')} and the current time is #{now.strftime('%H:%M:%S')}"
+           text: "Today's date is #{now.strftime("%Y-%m-%d")} and the current time is #{now.strftime("%H:%M:%S")}",
          )

          # Capture self for the handlers
@@ -180,7 +230,7 @@

          # Set up cleanup handlers only after prompt is created
          at_exit { session_instance.send(:cleanup_chat_prompt) }
-         Signal.trap('INT') {
+         Signal.trap("INT") {
            session_instance.send(:cleanup_chat_prompt)
            exit
          }
@@ -190,9 +240,9 @@

        @chat_prompt = PromptManager::Prompt.new(
          id: @chat_prompt_id,
          directives_processor: @directive_processor,
-         erb_flag: true,
-         envar_flag: true,
-         external_binding: binding,
+         erb_flag: true,
+         envar_flag: true,
+         external_binding: binding,
        )

        Reline::HISTORY.clear
@@ -205,14 +255,14 @@

        if !context.empty?
          # Add context files content to context
-         @context_manager.add_to_context(role: 'user', content: context)
+         @context_manager.add_to_context(role: "user", content: context)

          # Process the context
          @ui_presenter.display_thinking_animation
          response = @chat_processor.process_prompt(@context_manager.get_context)

          # Add AI response to context
-         @context_manager.add_to_context(role: 'assistant', content: response)
+         @context_manager.add_to_context(role: "assistant", content: response)

          # Output the response
          @chat_processor.output_response(response)
@@ -225,18 +275,18 @@

        if !STDIN.tty?
          original_stdin = STDIN.dup
          piped_input = STDIN.read.strip
-         STDIN.reopen('/dev/tty')
+         STDIN.reopen("/dev/tty")

          if !piped_input.empty?
            @chat_prompt.text = piped_input
            processed_input = @chat_prompt.to_s

-           @context_manager.add_to_context(role: 'user', content: processed_input)
+           @context_manager.add_to_context(role: "user", content: processed_input)

            @ui_presenter.display_thinking_animation
            response = @chat_processor.process_prompt(@context_manager.get_context)

-           @context_manager.add_to_context(role: 'assistant', content: response)
+           @context_manager.add_to_context(role: "assistant", content: response)
            @chat_processor.output_response(response)
            @chat_processor.speak(response) if AIA.speak?
            @ui_presenter.display_separator
@@ -249,10 +299,10 @@

        loop do
          follow_up_prompt = @ui_presenter.ask_question

-         break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == 'exit' || follow_up_prompt.strip.empty?
+         break if follow_up_prompt.nil? || follow_up_prompt.strip.downcase == "exit" || follow_up_prompt.strip.empty?

          if AIA.config.out_file
-           File.open(AIA.config.out_file, 'a') do |file|
+           File.open(AIA.config.out_file, "a") do |file|
              file.puts "\nYou: #{follow_up_prompt}"
            end
          end
@@ -260,28 +310,28 @@

          if @directive_processor.directive?(follow_up_prompt)
            directive_output = @directive_processor.process(follow_up_prompt, @context_manager)

-           if follow_up_prompt.strip.start_with?('//clear')
-             # The directive processor has called context_manager.clear_context
-             # but we need a more aggressive approach to fully clear all context
+           if follow_up_prompt.strip.start_with?("//clear")
+             # The directive processor has called context_manager.clear_context
+             # but we need a more aggressive approach to fully clear all context

-             # First, clear the context manager's context
-             @context_manager.clear_context(keep_system_prompt: true)
+             # First, clear the context manager's context
+             @context_manager.clear_context(keep_system_prompt: true)

-             # Second, try clearing the client's context
-             if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
-               AIA.config.client.clear_context
-             end
+             # Second, try clearing the client's context
+             if AIA.config.client && AIA.config.client.respond_to?(:clear_context)
+               AIA.config.client.clear_context
+             end

-             # Third, completely reinitialize the client to ensure fresh state
-             # This is the most aggressive approach to ensure no context remains
-             begin
-               AIA.config.client = AIA::RubyLLMAdapter.new
-             rescue => e
-               STDERR.puts "Error reinitializing client: #{e.message}"
-             end
+             # Third, completely reinitialize the client to ensure fresh state
+             # This is the most aggressive approach to ensure no context remains
+             begin
+               AIA.config.client = AIA::RubyLLMAdapter.new
+             rescue => e
+               STDERR.puts "Error reinitializing client: #{e.message}"
+             end

-             @ui_presenter.display_info("Chat context cleared.")
-             next
+             @ui_presenter.display_info("Chat context cleared.")
+             next
            elsif directive_output.nil? || directive_output.strip.empty?
              next
            else
@@ -293,19 +343,18 @@

          @chat_prompt.text = follow_up_prompt
          processed_prompt = @chat_prompt.to_s

-         @context_manager.add_to_context(role: 'user', content: processed_prompt)
+         @context_manager.add_to_context(role: "user", content: processed_prompt)
          conversation = @context_manager.get_context

          @ui_presenter.display_thinking_animation
          response = @chat_processor.process_prompt(conversation)

          @ui_presenter.display_ai_response(response)
-         @context_manager.add_to_context(role: 'assistant', content: response)
+         @context_manager.add_to_context(role: "assistant", content: response)
          @chat_processor.speak(response)

          @ui_presenter.display_separator
        end
-
      ensure
        @ui_presenter.display_chat_end
      end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: aia
  version: !ruby/object:Gem::Version
-   version: 0.9.7
+   version: 0.9.8
  platform: ruby
  authors:
  - Dewayne VanHoozer
@@ -57,14 +57,14 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 0.5.6
+         version: 0.5.7
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 0.5.6
+         version: 0.5.7
  - !ruby/object:Gem::Dependency
    name: ruby_llm
    requirement: !ruby/object:Gem::Requirement
@@ -318,6 +318,7 @@ files:
  - examples/headlines
  - examples/tools/edit_file.rb
  - examples/tools/list_files.rb
+ - examples/tools/pdf_page_reader.rb
  - examples/tools/read_file.rb
  - examples/tools/run_shell_command.rb
  - images/aia.png