aia 0.8.0 → 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 3a77cfcc71cea66fafd6eb8c17666eee983f7903a60f528e3bb396d8bdac0488
4
- data.tar.gz: c9424c2a40f886a231b10fe569276ba0b8e3e9feb435b38a0257852a91103629
3
+ metadata.gz: fd634957d83d1ea9d6a15fd9b82e5e535ea54952a16505490e49dd89aa1cd09e
4
+ data.tar.gz: 3c53dddc42841b8bd8106a7b108ebc86a5b358a1784edef20a08ba58c659ab45
5
5
  SHA512:
6
- metadata.gz: 6f96bdaafd932c34637cbd05415dc4a5c856ecaf386780426d10db5f1dade03a2368c7a767698c385754467b3882bf50faa972a8dedfcd0a5b040777b86d4bb7
7
- data.tar.gz: 0cfd3aa1b9750a7ae83dbb21ef737647f64b9cf67cb2a428864b7f22edbccaf317c9dbee205e2995cfdb3809b883f792722aac281a82869477caba3e70d04d23
6
+ metadata.gz: 39e70a7f9a50fbad4885e181fbaa7497d0a0d997efcf6354212f4c433e588598dc40716acd1842fe82ee11de6f5e22d5157a9fdb7fe5e5f9bdea03d912a7179b
7
+ data.tar.gz: bd7e23daeca3400394c6c0bdcb3b55eecf01fe61ee7a98441bc18a92e769e8a59953dda51800167f8a5212f5f7abc8657311d5a97df20d2b5e7338ec80279c4f
data/.version CHANGED
@@ -1 +1 @@
1
- 0.8.0
1
+ 0.8.2
data/CHANGELOG.md CHANGED
@@ -3,6 +3,18 @@
3
3
 
4
4
  ## Released
5
5
 
6
+ ### [0.8.2] 2025-04-18
7
+ - fixed problems with pre-loaded context and chat REPL
8
+ - piped content into `aia --chat` is now a part of the context/instructions
9
+ - content via "aia --chat < some_file" is added to the context/instructions
10
+ - `aia --chat context_file.txt context_file2.txt` now works
11
+ - `aia --chat prompt_id context_file.txt` also works
12
+
13
+
14
+
15
+ ### [0.8.1] 2025-04-17
16
+ - bumped version to 0.8.1 after correcting merge conflicts
17
+
6
18
  ### [0.8.0] WIP - 2025-04-15
7
19
  - Updated PromptManager to v0.5.1 which has some of the functionality that was originally developed in the AIA.
8
20
  - Enhanced README.md to include a comprehensive table of configuration options with defaults and associated environment variables.
data/README.md CHANGED
@@ -7,7 +7,7 @@
7
7
  (\____/) AI Assistant interaction with AI models. It automates the
8
8
  (_oo_) Fancy LLM management of pre-compositional prompts and
9
9
  (O) is Online executes generative AI (Gen-AI) commands on those
10
- __||__ \) prompts. AIA includes enhanced feathres such as
10
+ __||__ \) prompts. AIA includes enhanced features such as
11
11
  [/______\] / * embedded directives * shell integration
12
12
  / \__AI__/ \/ * embedded Ruby * history management
13
13
  / /__\ * interactive chat * prompt workflows
data/lib/aia/config.rb CHANGED
@@ -146,11 +146,16 @@ module AIA
146
146
 
147
147
  exit if and_exit
148
148
 
149
- # Only require a prompt_id if we're not in chat mode and not using fuzzy search
150
- if !config.chat && !config.fuzzy && config.prompt_id.empty?
151
- STDERR.puts "Error: A prompt ID is required unless using --chat or --fuzzy. Use -h or --help for help."
149
+ # Only require a prompt_id if we're not in chat mode, not using fuzzy search, and no context files
150
+ if !config.chat && !config.fuzzy && config.prompt_id.empty? && (!config.context_files || config.context_files.empty?)
151
+ STDERR.puts "Error: A prompt ID is required unless using --chat, --fuzzy, or providing context files. Use -h or --help for help."
152
152
  exit 1
153
153
  end
154
+
155
+ # If we're in chat mode with context files but no prompt_id, that's valid
156
+ if config.chat && config.prompt_id.empty? && config.context_files && !config.context_files.empty?
157
+ # This is a valid use case - no action needed
158
+ end
154
159
 
155
160
  # Tailor the PromptManager::Prompt
156
161
  if config.parameter_regex
@@ -192,7 +197,9 @@ module AIA
192
197
  config = OpenStruct.new
193
198
 
194
199
  opt_parser = OptionParser.new do |opts|
195
- opts.banner = "Usage: aia [options] PROMPT_ID [CONTEXT_FILE]*"
200
+ opts.banner = "Usage: aia [options] [PROMPT_ID] [CONTEXT_FILE]*\n" +
201
+ " aia --chat [PROMPT_ID] [CONTEXT_FILE]*\n" +
202
+ " aia --chat [CONTEXT_FILE]*"
196
203
 
197
204
  opts.on("--chat", "Begin a chat session with the LLM after the initial prompt response; will set --no-out_file so that the LLM response comes to STDOUT.") do
198
205
  config.chat = true
@@ -391,14 +398,24 @@ module AIA
391
398
  exit 1
392
399
  end
393
400
 
394
- # First remaining arg is the prompt ID
401
+ # Handle remaining args
395
402
  unless remaining_args.empty?
396
- config.prompt_id = remaining_args.shift
403
+ # If in chat mode and all args are existing files, treat them all as context files
404
+ if config.chat && remaining_args.all? { |arg| File.exist?(arg) }
405
+ config.context_files = remaining_args
406
+ # If first arg is empty string and we're in chat mode, treat all args as context files
407
+ elsif config.chat && remaining_args.first == ""
408
+ remaining_args.shift # Remove the empty string
409
+ config.context_files = remaining_args unless remaining_args.empty?
410
+ else
411
+ # First remaining arg is the prompt ID
412
+ config.prompt_id = remaining_args.shift
413
+
414
+ # Remaining args are context files
415
+ config.context_files = remaining_args unless remaining_args.empty?
416
+ end
397
417
  end
398
418
 
399
- # Remaining args are context files
400
- config.context_files = remaining_args unless remaining_args.empty?
401
-
402
419
 
403
420
  config
404
421
  end
data/lib/aia/session.rb CHANGED
@@ -23,7 +23,11 @@ module AIA
23
23
  def initialize(prompt_handler)
24
24
  @prompt_handler = prompt_handler
25
25
 
26
- if AIA.chat? && AIA.config.prompt_id.empty?
26
+ # Special handling for chat mode with context files but no prompt ID
27
+ if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
28
+ prompt_instance = nil
29
+ @history_manager = nil
30
+ elsif AIA.chat? && AIA.config.prompt_id.empty?
27
31
  prompt_instance = nil
28
32
  @history_manager = nil
29
33
  else
@@ -47,10 +51,15 @@ module AIA
47
51
  prompt_id = AIA.config.prompt_id
48
52
  role_id = AIA.config.role
49
53
 
50
- # Handle chat mode *only* if NO initial prompt is given
54
+ # Handle chat mode
51
55
  if AIA.chat?
52
56
  AIA::Utility.robot
53
- if prompt_id.empty? && role_id.empty?
57
+ # If we're in chat mode with only context files, go straight to chat
58
+ if prompt_id.empty? && role_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
59
+ start_chat
60
+ return
61
+ elsif prompt_id.empty? && role_id.empty?
62
+ # Even with an empty prompt_id, we might have context files
54
63
  start_chat
55
64
  return
56
65
  end
@@ -137,22 +146,78 @@ module AIA
137
146
  # --- Enter chat mode AFTER processing initial prompt ---
138
147
  if AIA.chat?
139
148
  @ui_presenter.display_separator # Add separator
140
- start_chat # start_chat will use the now populated context
149
+ start_chat(skip_context_files: true) # start_chat will use the now populated context
141
150
  end
142
151
  end
143
152
 
144
153
  # Starts the interactive chat session.
145
- def start_chat
154
+ def start_chat(skip_context_files: false)
146
155
  # Consider if display_chat_header is needed if robot+separator already shown
147
156
  # For now, let's keep it, maybe add an indicator message
148
157
  puts "\nEntering interactive chat mode..."
149
158
  @ui_presenter.display_chat_header
150
159
 
151
160
  Reline::HISTORY.clear # Keep Reline history for user input editing, separate from chat context
161
+
162
+ # Load context files if any and not skipping
163
+ if !skip_context_files && AIA.config.context_files && !AIA.config.context_files.empty?
164
+ context_content = AIA.config.context_files.map do |file|
165
+ File.read(file) rescue "Error reading file: #{file}"
166
+ end.join("\n\n")
167
+
168
+ if !context_content.empty?
169
+ # Add context files content to context
170
+ @context_manager.add_to_context(role: 'user', content: context_content)
171
+
172
+ # Process the context
173
+ operation_type = @chat_processor.determine_operation_type(AIA.config.model)
174
+ @ui_presenter.display_thinking_animation
175
+ response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
176
+
177
+ # Add AI response to context
178
+ @context_manager.add_to_context(role: 'assistant', content: response)
179
+
180
+ # Output the response
181
+ @chat_processor.output_response(response)
182
+ @chat_processor.speak(response)
183
+ @ui_presenter.display_separator
184
+ end
185
+ end
186
+
187
+ # Check for piped input (STDIN not a TTY and has data)
188
+ if !STDIN.tty?
189
+ # Save the original STDIN
190
+ orig_stdin = STDIN.dup
191
+
192
+ # Read the piped input
193
+ piped_input = STDIN.read.strip
194
+
195
+ # Reopen STDIN to the terminal
196
+ STDIN.reopen('/dev/tty')
197
+
198
+ if !piped_input.empty?
199
+ # Add piped input to context
200
+ @context_manager.add_to_context(role: 'user', content: piped_input)
201
+
202
+ # Process the piped input
203
+ operation_type = @chat_processor.determine_operation_type(AIA.config.model)
204
+ @ui_presenter.display_thinking_animation
205
+ response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
206
+
207
+ # Add AI response to context
208
+ @context_manager.add_to_context(role: 'assistant', content: response)
209
+
210
+ # Output the response
211
+ @chat_processor.output_response(response)
212
+ @chat_processor.speak(response)
213
+ @ui_presenter.display_separator
214
+ end
215
+ end
152
216
 
153
217
  loop do
154
218
  # Get user input
155
219
  prompt = @ui_presenter.ask_question
220
+
156
221
 
157
222
 
158
223
  break if prompt.nil? || prompt.strip.downcase == 'exit' || prompt.strip.empty?
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: aia
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.8.0
4
+ version: 0.8.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Dewayne VanHoozer