aia 0.8.1 → 0.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.version +1 -1
- data/CHANGELOG.md +12 -0
- data/README.md +1 -1
- data/lib/aia/config.rb +26 -9
- data/lib/aia/directive_processor.rb +36 -17
- data/lib/aia/session.rb +71 -10
- metadata +1 -2
- data/lib/aia/tools/ai_client_backend.rb +0 -92
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 15a24480e7b5958570ae21ef3e3f958cdb991a1f03cb88ebe3e8ca52a6029f9a
+  data.tar.gz: dcef2697a2d2fdec85e371421a5c16a48c70d3688ea7bb6646f001df5e795d68
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 97f8c423247051e31eef8b6a66fdde22fe72438097da41bb30ec53ef56508266e07f5502fa85cf8cfb63ebad22e995d7c7b4cbe55745f20e87b2b535cbd0b55c
+  data.tar.gz: c48ebed9374edc9a8dcf86b2dd592031c1bef02b60f5d61e113a9c90bf31865030231ebc267a6308ca46d71f3e864d02e44bfc93f221112102309ddf2186d563

data/.version
CHANGED
@@ -1 +1 @@
-0.8.1
+0.8.3

data/CHANGELOG.md
CHANGED
@@ -3,6 +3,18 @@
 
 ## Released
 
+### [0.8.2] 2025-04-18
+- fixed problems with pre-loaded context and chat repl
+- piped content into `aia --chat` is now a part of the context/instructions
+- content via "aia --chat < some_file" is added to the context/instructions
+- `aia --chat context_file.txt context_file2.txt` now works
+- `aia --chat prompt_id context)file.txt` also works
+
+
+
+### [0.8.1] 2025-04-17
+- bumped version to 0.8.1 after correcting merge conflicts
+
 ### [0.8.0] WIP - 2025-04-15
 - Updated PromptManager to v0.5.1 which has some of the functionality that was originally developed in the AIA.
 - Enhanced README.md to include a comprehensive table of configuration options with defaults and associated environment variables.

data/README.md
CHANGED
@@ -7,7 +7,7 @@
 (\____/) AI Assistant    interaction with AI models. It automates the
 (_oo_)   Fancy LLM       management of pre-compositional prompts and
 (O)      is Online       executes generative AI (Gen-AI) commands on those
-__||__   \)              prompts. AIA includes enhanced
+__||__   \)              prompts. AIA includes enhanced features such as
 [/______\] /             * embedded directives  * shell integration
 / \__AI__/ \/            * embedded Ruby        * history management
 / /__\                   * interactive chat     * prompt workflows

data/lib/aia/config.rb
CHANGED
@@ -146,11 +146,16 @@ module AIA
 
       exit if and_exit
 
-      # Only require a prompt_id if we're not in chat mode
-      if !config.chat && !config.fuzzy && config.prompt_id.empty?
-        STDERR.puts "Error: A prompt ID is required unless using --chat or
+      # Only require a prompt_id if we're not in chat mode, not using fuzzy search, and no context files
+      if !config.chat && !config.fuzzy && config.prompt_id.empty? && (!config.context_files || config.context_files.empty?)
+        STDERR.puts "Error: A prompt ID is required unless using --chat, --fuzzy, or providing context files. Use -h or --help for help."
         exit 1
       end
+
+      # If we're in chat mode with context files but no prompt_id, that's valid
+      if config.chat && config.prompt_id.empty? && config.context_files && !config.context_files.empty?
+        # This is a valid use case - no action needed
+      end
 
       # Tailor the PromptManager::Prompt
       if config.parameter_regex
@@ -192,7 +197,9 @@ module AIA
      config = OpenStruct.new
 
      opt_parser = OptionParser.new do |opts|
-        opts.banner = "Usage: aia [options] PROMPT_ID [CONTEXT_FILE]
+        opts.banner = "Usage: aia [options] [PROMPT_ID] [CONTEXT_FILE]*\n" +
+                      " aia --chat [PROMPT_ID] [CONTEXT_FILE]*\n" +
+                      " aia --chat [CONTEXT_FILE]*"
 
        opts.on("--chat", "Begin a chat session with the LLM after the initial prompt response; will set --no-out_file so that the LLM response comes to STDOUT.") do
          config.chat = true
@@ -391,14 +398,24 @@ module AIA
        exit 1
      end
 
-      #
+      # Handle remaining args
      unless remaining_args.empty?
-
+        # If in chat mode and all args are existing files, treat them all as context files
+        if config.chat && remaining_args.all? { |arg| File.exist?(arg) }
+          config.context_files = remaining_args
+        # If first arg is empty string and we're in chat mode, treat all args as context files
+        elsif config.chat && remaining_args.first == ""
+          remaining_args.shift # Remove the empty string
+          config.context_files = remaining_args unless remaining_args.empty?
+        else
+          # First remaining arg is the prompt ID
+          config.prompt_id = remaining_args.shift
+
+          # Remaining args are context files
+          config.context_files = remaining_args unless remaining_args.empty?
+        end
      end
 
-      # Remaining args are context files
-      config.context_files = remaining_args unless remaining_args.empty?
-
 
      config
    end

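Note: the argument handling added to config.rb above classifies whatever is left after option parsing: in chat mode, arguments that are all existing files become context files; otherwise the first argument is taken as the prompt ID and the rest as context files. Below is a minimal standalone sketch of that classification in plain Ruby; the method name classify_remaining_args is illustrative, not AIA's API, and only the field names mirror the diff.

require 'ostruct'

# Sketch of the remaining-args classification shown in the diff above.
def classify_remaining_args(remaining_args, chat:)
  config = OpenStruct.new(chat: chat)

  unless remaining_args.empty?
    if chat && remaining_args.all? { |arg| File.exist?(arg) }
      # Chat mode with nothing but existing files: everything is context
      config.context_files = remaining_args
    else
      # First remaining arg is the prompt ID; the rest are context files
      config.prompt_id = remaining_args.shift
      config.context_files = remaining_args unless remaining_args.empty?
    end
  end

  config
end

# classify_remaining_args(["ask", "notes.txt"], chat: false)
#   => #<OpenStruct chat=false, prompt_id="ask", context_files=["notes.txt"]>
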
data/lib/aia/directive_processor.rb
CHANGED
@@ -63,7 +63,7 @@ module AIA
      if EXCLUDED_METHODS.include?(method_name)
        return "Error: #{method_name} is not a valid directive: #{key}"
      elsif respond_to?(method_name, true)
-        return send(method_name, args)
+        return send(method_name, args, context_manager)
      else
        return "Error: Unknown directive '#{key}'"
      end
@@ -125,8 +125,9 @@ module AIA
    end
 
    desc "Inserts the contents of a file Example: //include path/to/file"
-    def include(args)
-
+    def include(args, context_manager=nil)
+      # echo takes care of envars and tilde expansion
+      file_path = `echo #{args.shift}`.strip
 
      if @included_files.include?(file_path)
        ""
@@ -143,7 +144,7 @@ module AIA
    alias_method :import, :include
 
    desc "Without arguments it will print a list of all config items and their values _or_ //config item (for one item's value) _or_ //config item = value (to set a value of an item)"
-    def config(args = [])
+    def config(args = [], context_manager=nil)
      args = Array(args)
 
      if args.empty?
@@ -171,33 +172,42 @@ module AIA
    alias_method :cfg, :config
 
    desc "Shortcut for //config top_p _and_ //config top_p = value"
-    def top_p(
-      send(:config, args.prepend('top_p'))
+    def top_p(args, context_manager=nil)
+      send(:config, args.prepend('top_p'), context_manager)
    end
    alias_method :topp, :top_p
 
+    desc "Review the current context"
+    def review(args, context_manager=nil)
+      ap context_manager.get_context
+      ''
+    end
+    alias_method :context, :review
+
    desc "Shortcut for //config model _and_ //config model = value"
-    def model(
-      send(:config, args.prepend('model'))
+    def model(args, context_manager=nil)
+      send(:config, args.prepend('model'), context_manager)
    end
 
    desc "Shortcut for //config temperature _and_ //config temperature = value"
-    def temperature(
-      send(:config, args.prepend('temperature'))
+    def temperature(args, context_manager=nil)
+      send(:config, args.prepend('temperature'), context_manager)
    end
    alias_method :temp, :temperature
 
    desc "Clears the conversation history (aka context) same as //config clear = true"
-    def clear(args, context_manager)
+    def clear(args, context_manager=nil)
      if context_manager.nil?
        return "Error: Context manager not available for //clear directive."
      end
+
      context_manager.clear_context
-
+
+      ''
    end
 
    desc "Shortcut for a one line of ruby code; result is added to the context"
-    def ruby(
+    def ruby(args, context_manager=nil)
      ruby_code = args.join(' ')
 
      begin
@@ -211,25 +221,34 @@ module AIA
    end
    alias_method :rb, :ruby
 
+
+    desc "Executes one line of shell code; result is added to the context"
+    def shell(args, context_manager=nil)
+      shell_code = args.join(' ')
+
+      `#{shell_code}`
+    end
+    alias_method :sh, :shell
+
    desc "Use the system's say command to speak text //say some text"
-    def say(
+    def say(args, context_manager=nil)
      `say #{args.join(' ')}`
      ""
    end
 
    desc "Inserts an instruction to keep responses short and to the point."
-    def terse(
+    def terse(args, context_manager=nil)
      AIA::Session::TERSE_PROMPT
    end
 
    desc "Display the ASCII art AIA robot."
-    def robot(
+    def robot(args, context_manager=nil)
      AIA::Utility.robot
      ""
    end
 
    desc "Generates this help content"
-    def help(
+    def help(args=nil, context_manager=nil)
      puts
      puts "Available Directives"
      puts "===================="

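Note: across directive_processor.rb, every directive now accepts an optional context_manager so the dispatcher can call send(method_name, args, context_manager) uniformly, and context-aware directives such as //review and //clear can read or reset the conversation. Below is a minimal sketch of that dispatch pattern; ToyDirectives and ToyContextManager are stand-ins invented here, not AIA's classes, and only the dispatch shape mirrors the diff.

# Sketch of uniform directive dispatch with an optional context manager.
class ToyContextManager
  def initialize
    @messages = []
  end

  def get_context
    @messages
  end

  def clear_context
    @messages.clear
  end

  def add(role, content)
    @messages << { role: role, content: content }
  end
end

class ToyDirectives
  def run(name, args, context_manager = nil)
    return "Error: Unknown directive '#{name}'" unless respond_to?(name, true)

    send(name, args, context_manager) # every directive takes both arguments
  end

  private

  def review(_args, context_manager = nil)
    context_manager&.get_context.inspect
  end

  def clear(_args, context_manager = nil)
    return "Error: no context manager" if context_manager.nil?

    context_manager.clear_context
    ''
  end
end

cm = ToyContextManager.new
cm.add('user', 'hello')
directives = ToyDirectives.new
puts directives.run(:review, [], cm) # prints the stored context
puts directives.run(:clear,  [], cm) # clears it and returns ''
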
data/lib/aia/session.rb
CHANGED
@@ -23,7 +23,11 @@ module AIA
    def initialize(prompt_handler)
      @prompt_handler = prompt_handler
 
-
+      # Special handling for chat mode with context files but no prompt ID
+      if AIA.chat? && AIA.config.prompt_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
+        prompt_instance = nil
+        @history_manager = nil
+      elsif AIA.chat? && AIA.config.prompt_id.empty?
        prompt_instance = nil
        @history_manager = nil
      else
@@ -47,10 +51,15 @@ module AIA
      prompt_id = AIA.config.prompt_id
      role_id = AIA.config.role
 
-      # Handle chat mode
+      # Handle chat mode
      if AIA.chat?
        AIA::Utility.robot
-
+        # If we're in chat mode with only context files, go straight to chat
+        if prompt_id.empty? && role_id.empty? && AIA.config.context_files && !AIA.config.context_files.empty?
+          start_chat
+          return
+        elsif prompt_id.empty? && role_id.empty?
+          # Even with an empty prompt_id, we might have context files
          start_chat
          return
        end
@@ -137,12 +146,12 @@ module AIA
      # --- Enter chat mode AFTER processing initial prompt ---
      if AIA.chat?
        @ui_presenter.display_separator # Add separator
-        start_chat # start_chat will use the now populated context
+        start_chat(skip_context_files: true) # start_chat will use the now populated context
      end
    end
 
    # Starts the interactive chat session.
-    def start_chat
+    def start_chat(skip_context_files: false)
      # Consider if display_chat_header is needed if robot+separator already shown
      # For now, let's keep it, maybe add an indicator message
      puts "\nEntering interactive chat mode..."
@@ -150,11 +159,67 @@ module AIA
 
      Reline::HISTORY.clear # Keep Reline history for user input editing, separate from chat context
 
+      # Load context files if any and not skipping
+      if !skip_context_files && AIA.config.context_files && !AIA.config.context_files.empty?
+        context_content = AIA.config.context_files.map do |file|
+          File.read(file) rescue "Error reading file: #{file}"
+        end.join("\n\n")
+
+        if !context_content.empty?
+          # Add context files content to context
+          @context_manager.add_to_context(role: 'user', content: context_content)
+
+          # Process the context
+          operation_type = @chat_processor.determine_operation_type(AIA.config.model)
+          @ui_presenter.display_thinking_animation
+          response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
+
+          # Add AI response to context
+          @context_manager.add_to_context(role: 'assistant', content: response)
+
+          # Output the response
+          @chat_processor.output_response(response)
+          @chat_processor.speak(response)
+          @ui_presenter.display_separator
+        end
+      end
+
+      # Check for piped input (STDIN not a TTY and has data)
+      if !STDIN.tty?
+        # Save the original STDIN
+        orig_stdin = STDIN.dup
+
+        # Read the piped input
+        piped_input = STDIN.read.strip
+
+        # Reopen STDIN to the terminal
+        STDIN.reopen('/dev/tty')
+
+        if !piped_input.empty?
+          # Add piped input to context
+          @context_manager.add_to_context(role: 'user', content: piped_input)
+
+          # Process the piped input
+          operation_type = @chat_processor.determine_operation_type(AIA.config.model)
+          @ui_presenter.display_thinking_animation
+          response = @chat_processor.process_prompt(@context_manager.get_context, operation_type)
+
+          # Add AI response to context
+          @context_manager.add_to_context(role: 'assistant', content: response)
+
+          # Output the response
+          @chat_processor.output_response(response)
+          @chat_processor.speak(response)
+          @ui_presenter.display_separator
+        end
+      end
+
      loop do
        # Get user input
        prompt = @ui_presenter.ask_question
 
 
+
        break if prompt.nil? || prompt.strip.downcase == 'exit' || prompt.strip.empty?
 
        if AIA.config.out_file
@@ -167,7 +232,7 @@ module AIA
          directive_output = @directive_processor.process(prompt, @context_manager) # Pass context_manager
 
          # Add check for specific directives like //clear that might modify context
-          if prompt.strip.start_with?('//clear'
+          if prompt.strip.start_with?('//clear')
            # Context is likely cleared within directive_processor.process now
            # or add @context_manager.clear_context here if not handled internally
            @ui_presenter.display_info("Chat context cleared.")
@@ -191,10 +256,6 @@ module AIA
        # Use ContextManager to get the conversation
        conversation = @context_manager.get_context # System prompt handled internally
 
-        # FIXME: remove this comment once verified
-        # is conversation the same thing as the context for a chat session? YES
-        # if so need to somehow delete it when the //clear directive is entered. - Addressed above/in DirectiveProcessor
-
        operation_type = @chat_processor.determine_operation_type(AIA.config.model)
        @ui_presenter.display_thinking_animation
        response = @chat_processor.process_prompt(conversation, operation_type)

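Note: the start_chat changes above pre-load two sources before the REPL loop: the contents of any context files (unless skip_context_files: is set), and anything piped on STDIN, which is read once and then STDIN is reopened on /dev/tty so interactive input still works. Below is a minimal standalone sketch of that STDIN handoff, assuming a POSIX system with /dev/tty; the surrounding prompt loop is illustrative only.

# Sketch: drain piped input, then hand STDIN back to the terminal so an
# interactive loop can still read from the keyboard.
piped_input = nil

unless STDIN.tty?
  piped_input = STDIN.read.strip # whatever was piped in, e.g. `echo hi | ruby sketch.rb`
  STDIN.reopen('/dev/tty')       # reattach STDIN to the controlling terminal
end

# In AIA the captured text is added to the chat context; here we just print it.
puts "Pre-loaded context: #{piped_input}" if piped_input && !piped_input.empty?

print 'you> '
line = STDIN.gets # now reads from the keyboard again
puts "echo: #{line}"
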
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: aia
 version: !ruby/object:Gem::Version
-  version: 0.8.1
+  version: 0.8.3
 platform: ruby
 authors:
 - Dewayne VanHoozer
@@ -292,7 +292,6 @@ files:
 - lib/aia/prompt_handler.rb
 - lib/aia/session.rb
 - lib/aia/shell_command_executor.rb
-- lib/aia/tools/ai_client_backend.rb
 - lib/aia/ui_presenter.rb
 - lib/aia/utility.rb
 - lib/aia/version.rb

data/lib/aia/tools/ai_client_backend.rb
DELETED
@@ -1,92 +0,0 @@
-# lib/aia/tools/ai_client_backend.rb
-
-# This is WIP in the `develop` branch
-# do not use.
-
-require 'ai_client'
-require_relative 'backend_common'
-
-class AIA::AiClientBackend < AIA::Tools
-  include AIA::BackendCommon
-
-  meta(
-    name: 'ai_client',
-    role: :backend,
-    desc: 'AI Client integration for unified model access',
-    url: 'https://github.com/path/to/ai_client', # TODO: Update URL
-    install: 'gem install ai_client',
-  )
-
-  attr_reader :client, :raw_response
-
-  DEFAULT_PARAMETERS = ''
-  DIRECTIVES = %w[
-    model
-    temperature
-    max_tokens
-    top_p
-    frequency_penalty
-    presence_penalty
-  ]
-
-  def initialize(text: "", files: [])
-    super
-    @client = AiClient.new
-  end
-
-  def build_command
-    # No-op - ai_client doesn't use command line
-    @parameters = ""
-  end
-
-  def run
-    handle_model(AIA.config.model)
-  rescue => e
-    puts "Error handling model #{AIA.config.model}: #{e.message}"
-  end
-
-  private
-
-  def handle_model(model_name)
-    case model_name
-    when /vision/
-      image2text
-    when /^gpt/
-      text2text
-    when /^dall-e/
-      text2image
-    when /^tts/
-      text2audio
-    when /^whisper/
-      audio2text
-    else
-      raise "Unsupported model: #{model_name}"
-    end
-  end
-
-  def text2text
-    response = client.complete(
-      prompt: text,
-      model: AIA.config.model,
-      temperature: AIA.config.temp
-    )
-    response.completion
-  end
-
-  # Placeholder methods to maintain API compatibility
-  def image2text
-    raise "Not yet implemented for ai_client"
-  end
-
-  def text2image
-    raise "Not yet implemented for ai_client"
-  end
-
-  def text2audio
-    raise "Not yet implemented for ai_client"
-  end
-
-  def audio2text
-    raise "Not yet implemented for ai_client"
-  end
-end