aia 0.9.6 → 0.9.8
This diff compares the contents of publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.config/tocer/configuration.yml +2 -1
- data/.version +1 -1
- data/CHANGELOG.md +17 -0
- data/README.md +90 -45
- data/examples/headlines +24 -15
- data/examples/tools/pdf_page_reader.rb +30 -0
- data/examples/tools/run_shell_command.rb +16 -4
- data/lib/aia/chat_processor_service.rb +3 -23
- data/lib/aia/config.rb +124 -10
- data/lib/aia/directive_processor.rb +37 -3
- data/lib/aia/ruby_llm_adapter.rb +2 -0
- data/lib/aia/session.rb +183 -125
- data/lib/aia/ui_presenter.rb +1 -1
- data/lib/aia.rb +4 -2
- metadata +15 -14
data/lib/aia/config.rb
CHANGED
@@ -54,7 +54,6 @@ module AIA
       append: false, # Default to not append to existing out_file
 
       # workflow
-      next: nil,
       pipeline: [],
 
       # PromptManager::Prompt Tailoring
@@ -124,6 +123,18 @@ module AIA
     remaining_args = config.remaining_args.dup
     config.remaining_args = nil
 
+    # Check for STDIN content
+    stdin_content = ''
+
+    if !STDIN.tty? && !STDIN.closed?
+      begin
+        stdin_content << "\n" + STDIN.read
+        STDIN.reopen('/dev/tty') # Reopen STDIN for interactive use
+      rescue => _
+        # If we can't reopen, continue without error
+      end
+    end
+
     # Is first remaining argument a prompt ID?
     unless remaining_args.empty?
       maybe_id = remaining_args.first
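This hunk makes `aia` usable at the end of a shell pipe while keeping `--chat` interactive: piped input is drained before option parsing, then STDIN is rebound to the terminal. A minimal standalone sketch of the same pattern (a hypothetical script, not part of the gem; assumes a Unix `/dev/tty`):

```ruby
#!/usr/bin/env ruby
# stdin_capture.rb - sketch of the piped-STDIN pattern above

stdin_content = ''

# STDIN.tty? is false when input arrives via a pipe or redirect.
if !STDIN.tty? && !STDIN.closed?
  begin
    stdin_content << "\n" + STDIN.read  # drain whatever was piped in
    STDIN.reopen('/dev/tty')            # rebind the keyboard for interactive prompts
  rescue StandardError
    # No controlling terminal (cron, CI); keep the captured text and move on.
  end
end

puts "captured #{stdin_content.bytesize} bytes" unless stdin_content.strip.empty?
```

Try it with, e.g., `echo "summarize this" | ruby stdin_capture.rb`.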
@@ -134,6 +145,11 @@ module AIA
       end
     end
 
+    # Store STDIN content for later processing in session.rb
+    if stdin_content && !stdin_content.strip.empty?
+      config.stdin_content = stdin_content
+    end
+
     unless remaining_args.empty?
       bad_files = remaining_args.reject { |filename| AIA.good_file?(filename) }
       if bad_files.any?
@@ -141,9 +157,21 @@ module AIA
         exit 1
       end
 
-      config.context_files
+      config.context_files ||= []
+      config.context_files += remaining_args
+    end
+
+    # Check if the last context file is an executable prompt
+    if config.executable_prompt &&
+       config.context_files &&
+       !config.context_files.empty?
+      config.executable_prompt_file = config.context_files.pop
     end
 
+    # TODO: Consider that if there is no prompt ID but there is an executable prompt
+    # then maybe that is all that is needed.
+
+
     if config.prompt_id.nil? && !config.chat && !config.fuzzy
       STDERR.puts "Error: A prompt ID is required unless using --chat, --fuzzy, or providing context files. Use -h or --help for help."
       exit 1
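When `-x/--exec` is set, the last remaining context file is treated as the executable prompt and popped off the list. A stand-in illustration of that handoff (the Struct is an assumption replacing the gem's config object):

```ruby
# Stand-in for the executable-prompt handoff above; Struct replaces AIA's config.
Config = Struct.new(:executable_prompt, :context_files, :executable_prompt_file)
config = Config.new(true, %w[notes.md my_prompt], nil)

if config.executable_prompt &&
   config.context_files &&
   !config.context_files.empty?
  config.executable_prompt_file = config.context_files.pop
end

p config.executable_prompt_file  # => "my_prompt"
p config.context_files           # => ["notes.md"]
```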
@@ -162,6 +190,7 @@ module AIA
     if config.prompt_id.nil? || config.prompt_id.empty?
       if !config.role.nil? || !config.role.empty?
         config.prompt_id = config.role
+        config.pipeline.prepend config.prompt_id
         config.role = ''
       end
     end
@@ -214,6 +243,25 @@ module AIA
       PromptManager::Prompt.parameter_regex = Regexp.new(config.parameter_regex)
     end
 
+    if !config.prompt_id.empty? && config.prompt_id != config.pipeline.first
+      config.pipeline.prepend config.prompt_id
+    end
+
+    unless config.pipeline.empty?
+      config.pipeline.each do |prompt_id|
+        # Skip empty prompt IDs (can happen in chat-only mode)
+        next if prompt_id.nil? || prompt_id.empty?
+
+        prompt_file_path = File.join(config.prompts_dir, "#{prompt_id}.txt")
+        unless File.exist?(prompt_file_path)
+          STDERR.puts "Error: Prompt ID '#{prompt_id}' does not exist at #{prompt_file_path}"
+          and_exit = true
+        end
+      end
+    end
+
+    exit(1) if and_exit
+
     config
   end
 
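The loop above validates every pipeline entry before exiting, so the user sees all missing prompt files in one pass; `and_exit` stays unset unless some file is missing, and `exit(1) if and_exit` then fires once at the end. A self-contained approximation (method and file names here are assumptions):

```ruby
require 'tmpdir'

# Approximation of the pipeline pre-flight check above; names are assumptions.
def pipeline_valid?(pipeline, prompts_dir)
  missing = pipeline.reject { |id| id.nil? || id.empty? }
                    .reject { |id| File.exist?(File.join(prompts_dir, "#{id}.txt")) }
  missing.each do |id|
    warn "Error: Prompt ID '#{id}' does not exist at #{File.join(prompts_dir, "#{id}.txt")}"
  end
  missing.empty?  # the caller exits(1) when false, after every ID was reported
end

Dir.mktmpdir do |dir|
  File.write(File.join(dir, 'first.txt'), 'Hello, [NAME].')
  pipeline_valid?(%w[first second], dir)  # warns about 'second' only
end
```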
@@ -309,7 +357,7 @@ module AIA
                   " aia --chat [PROMPT_ID] [CONTEXT_FILE]*\n" +
                   " aia --chat [CONTEXT_FILE]*"
 
-      opts.on("--chat", "Begin a chat session with the LLM after
+      opts.on("--chat", "Begin a chat session with the LLM after processing all prompts in the pipeline.") do
        config.chat = true
        puts "Debug: Setting chat mode to true" if config.debug
       end
@@ -325,10 +373,68 @@ module AIA
         end
       end
 
+      opts.on('--available_models [QUERY]', 'List (then exit) available models that match the optional query - a comma separated list of AND components like: openai,mini') do |query|
+
+        # SMELL: mostly duplications the code in the vailable_models directive
+        # assumes that the adapter is for the ruby_llm gem
+        # should this be moved to the Utilities class as a common method?
+
+        if query.nil?
+          query = []
+        else
+          query = query.split(',')
+        end
+
+        header = "\nAvailable LLMs"
+        header += " for #{query.join(' and ')}" if query
+
+        puts header + ':'
+        puts
+
+        q1 = query.select{|q| q.include?('_to_')}.map{|q| ':'==q[0] ? q[1...] : q}
+        q2 = query.reject{|q| q.include?('_to_')}
+
+
+        # query = nil
+        counter = 0
+
+        RubyLLM.models.all.each do |llm|
+          inputs = llm.modalities.input.join(',')
+          outputs = llm.modalities.output.join(',')
+          entry = "- #{llm.id} (#{llm.provider}) #{inputs} to #{outputs}"
+
+          if query.nil? || query.empty?
+            counter += 1
+            puts entry
+            next
+          end
+
+          show_it = true
+          q1.each{|q| show_it &&= llm.modalities.send("#{q}?")}
+          q2.each{|q| show_it &&= entry.include?(q)}
+
+          if show_it
+            counter += 1
+            puts entry
+          end
+        end
+
+        puts if counter > 0
+        puts "#{counter} LLMs matching your query"
+        puts
+
+        exit
+      end
+
       opts.on("-m MODEL", "--model MODEL", "Name of the LLM model to use") do |model|
         config.model = model
       end
 
+      opts.on("-x", "--[no-]exec", "Used to designate an executable prompt file") do |value|
+        config.executable_prompt = value
+      end
+
+
       opts.on("--terse", "Adds a special instruction to the prompt asking the AI to keep responses short and to the point") do
         config.terse = true
       end
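Query terms containing `_to_` (for example `text_to_audio`) are routed to the model's `modalities` object as predicate calls, while the remaining terms are plain substring matches against the listing line; all terms are ANDed. A rough standalone illustration with stubbed data (the Struct stands in for ruby_llm's model objects, which this sketch does not depend on):

```ruby
# Stubbed illustration of the --available_models filtering above.
Model = Struct.new(:id, :provider, :inputs, :outputs) do
  def entry
    "- #{id} (#{provider}) #{inputs.join(',')} to #{outputs.join(',')}"
  end

  # Stands in for llm.modalities.send("#{q}?") on a real model object.
  def supports?(modality_query)
    from, to = modality_query.split('_to_')
    inputs.include?(from) && outputs.include?(to)
  end
end

models = [
  Model.new('gpt-4o-mini', 'openai', %w[text image], %w[text]),
  Model.new('tts-1',       'openai', %w[text],       %w[audio]),
]

query        = %w[openai text_to_audio]      # e.g. --available_models openai,text_to_audio
modal, plain = query.partition { |q| q.include?('_to_') }

models.each do |m|
  show_it = modal.all? { |q| m.supports?(q) } && plain.all? { |q| m.entry.include?(q) }
  puts m.entry if show_it                    # prints only the tts-1 entry
end
```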
@@ -362,7 +468,7 @@ module AIA
         end
       end
 
-      opts.on("
+      opts.on("--prompts_dir DIR", "Directory containing prompt files") do |dir|
         config.prompts_dir = dir
       end
 
@@ -383,7 +489,13 @@ module AIA
       end
 
       opts.on("-o", "--[no-]out_file [FILE]", "Output file (default: temp.md)") do |file|
-
+        if file == false  # --no-out_file was used
+          config.out_file = nil
+        elsif file.nil?   # No argument provided
+          config.out_file = 'temp.md'
+        else              # File name provided
+          config.out_file = File.expand_path(file, Dir.pwd)
+        end
       end
 
       opts.on("-a", "--[no-]append", "Append to output file instead of overwriting") do |append|
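The rewritten handler distinguishes the three values OptionParser yields for a `--[no-]` switch with an optional argument: `false` for the `--no-` form, `nil` for the bare flag, and the string otherwise. A minimal demonstration:

```ruby
require 'optparse'

# Demonstrates the three values yielded for --[no-]out_file [FILE].
out_file = :unset
OptionParser.new do |opts|
  opts.on('-o', '--[no-]out_file [FILE]') do |file|
    out_file = if file == false   # --no-out_file
                 nil
               elsif file.nil?    # bare --out_file
                 'temp.md'
               else               # --out_file result.md
                 File.expand_path(file, Dir.pwd)
               end
  end
end.parse!(ARGV)

p out_file
```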
@@ -399,11 +511,13 @@ module AIA
       end
 
       opts.on("-n", "--next PROMPT_ID", "Next prompt to process") do |next_prompt|
-        config.
+        config.pipeline ||= []
+        config.pipeline << next_prompt
       end
 
-      opts.on("--pipeline PROMPTS", "Pipeline of
-        config.pipeline
+      opts.on("-p PROMPTS", "--pipeline PROMPTS", "Pipeline of comma-seperated prompt IDs to process") do |pipeline|
+        config.pipeline ||= []
+        config.pipeline += pipeline.split(',').map(&:strip)
       end
 
       opts.on("-f", "--fuzzy", "Use fuzzy matching for prompt search") do
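Because `-n` and `--pipeline` now both append to the same array, the two flags compose across a single command line. A sketch of the accumulated result (the Struct is a stand-in for the config object):

```ruby
# aia first -n second --pipeline third,fourth  (stand-in config)
config = Struct.new(:pipeline).new(nil)

config.pipeline ||= []
config.pipeline << 'second'                                 # from -n second
config.pipeline += 'third,fourth'.split(',').map(&:strip)   # from --pipeline third,fourth

p config.pipeline  # => ["second", "third", "fourth"]
```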
@@ -422,8 +536,8 @@ module AIA
         config.debug = $DEBUG_ME = false
       end
 
-      opts.on("-v", "--verbose", "Be verbose") do
-        config.verbose =
+      opts.on("-v", "--[no-]verbose", "Be verbose") do |value|
+        config.verbose = value
       end
 
       opts.on("--speak", "Simple implementation. Uses the speech model to convert text to audio, then plays the audio. Fun with --chat. Supports configuration of speech model and voice.") do
data/lib/aia/directive_processor.rb
CHANGED
@@ -1,6 +1,8 @@
 # lib/aia/directive_processor.rb
 
+require 'active_support/all'
 require 'faraday'
+require 'word_wrapper' # Pure ruby word wrapping
 
 module AIA
   class DirectiveProcessor
@@ -145,7 +147,7 @@ module AIA
     end
 
     desc "Specify the next prompt ID to process after this one"
-    def next(args = [])
+    def next(args = [], context_manager=nil)
      if args.empty?
        ap AIA.config.next
      else
@@ -154,12 +156,39 @@ module AIA
       ''
     end
 
+    desc "Show a list of tools and their description"
+    def tools(args = [], context_manager=nil)
+      indent = 4
+      spaces = " "*indent
+      width = TTY::Screen.width - indent - 2
+
+      if !AIA.config.tools.empty?
+        puts
+        puts "Available Tools"
+        puts "==============="
+
+        AIA.config.tools.split(',').map(&:strip).each do |tool|
+          klass = tool.constantize
+          puts "\n#{klass.name}"
+          puts "-"*klass.name.size
+          puts WordWrapper::MinimumRaggedness.new(width, klass.description).wrap.split("\n").map{|s| spaces+s+"\n"}.join
+        end
+      else
+        puts "No tools configured"
+      end
+      puts
+
+      ''
+    end
+
     desc "Specify a sequence pf prompt IDs to process after this one"
-    def pipeline(args = [])
+    def pipeline(args = [], context_manager=nil)
       if args.empty?
         ap AIA.config.pipeline
+      elsif 1 == args.size
+        AIA.config.pipeline += args.first.split(',').map(&:strip).reject{|id| id.empty?}
       else
-        AIA.config.pipeline += args.map
+        AIA.config.pipeline += args.map{|id| id.gsub(',', '').strip}.reject{|id| id.empty?}
       end
       ''
     end
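The new `//tools` directive resolves each configured tool name to a class via ActiveSupport's `String#constantize` and wraps its description to the terminal width with the word_wrapper gem. A trimmed, runnable sketch of the same listing logic (the sample tool class and fixed width are assumptions):

```ruby
require 'active_support/core_ext/string/inflections'  # String#constantize
require 'word_wrapper'                                # pure-Ruby word wrapping

# Hypothetical tool class standing in for a configured RubyLLM tool.
class PdfPageReader
  def self.description
    'Reads the text of one page out of a PDF file so it can be given to the LLM.'
  end
end

tools_csv = 'PdfPageReader'  # AIA.config.tools is a comma-separated string like this
indent    = 4
width     = 80 - indent - 2  # the directive uses TTY::Screen.width instead of 80

tools_csv.split(',').map(&:strip).each do |tool|
  klass = tool.constantize   # "PdfPageReader" -> PdfPageReader
  puts "\n#{klass.name}"
  puts '-' * klass.name.size
  wrapped = WordWrapper::MinimumRaggedness.new(width, klass.description).wrap
  puts wrapped.split("\n").map { |line| (' ' * indent) + line }.join("\n")
end
```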
@@ -299,6 +328,11 @@ module AIA
     desc "All Available models or query on [partial LLM or provider name] Examples: //llms ; //llms openai ; //llms claude"
     def available_models( args=nil, context_manager=nil)
       query = args
+
+      if 1 == query.size
+        query = query.first.split(',')
+      end
+
       header = "\nAvailable LLMs"
       header += " for #{query.join(' and ')}" if query
 
data/lib/aia/ruby_llm_adapter.rb
CHANGED