aia 1.0.0.pre.beta → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.version +1 -1
- data/CHANGELOG.md +89 -0
- data/COMMITS.md +192 -11
- data/README.md +327 -110
- data/docs/cli-reference.md +93 -10
- data/docs/configuration.md +29 -36
- data/docs/contributing.md +2 -2
- data/docs/directives-reference.md +49 -27
- data/docs/examples/index.md +2 -2
- data/docs/examples/mcp/index.md +93 -97
- data/docs/examples/prompts/automation/index.md +3 -2
- data/docs/examples/tools/index.md +17 -27
- data/docs/faq.md +9 -12
- data/docs/guides/basic-usage.md +4 -4
- data/docs/guides/chat.md +39 -34
- data/docs/guides/tools.md +4 -4
- data/docs/index.md +36 -62
- data/docs/installation.md +1 -1
- data/docs/mcp-integration.md +75 -139
- data/docs/prompt_management.md +88 -1
- data/docs/security.md +79 -81
- data/docs/tools-and-mcp-examples.md +8 -6
- data/docs/workflows-and-pipelines.md +2 -6
- data/examples/.gitignore +1 -0
- data/examples/README.md +41 -0
- data/examples/run_all.sh +261 -0
- data/lib/aia/adapter/chat_execution.rb +9 -7
- data/lib/aia/adapter/mcp_connector.rb +0 -29
- data/lib/aia/adapter/modality_handlers.rb +23 -15
- data/lib/aia/adapter/tool_filter.rb +21 -0
- data/lib/aia/adapter/tool_loader.rb +1 -9
- data/lib/aia/chat_loop.rb +244 -0
- data/lib/aia/chat_processor_service.rb +6 -3
- data/lib/aia/config/cli_parser.rb +56 -18
- data/lib/aia/config/defaults.yml +17 -2
- data/lib/aia/config/validator.rb +52 -11
- data/lib/aia/config.rb +29 -3
- data/lib/aia/directive.rb +29 -0
- data/lib/aia/directives/configuration_directives.rb +2 -1
- data/lib/aia/directives/execution_directives.rb +1 -1
- data/lib/aia/directives/model_directives.rb +28 -27
- data/lib/aia/directives/web_and_file_directives.rb +78 -40
- data/lib/aia/errors.rb +20 -1
- data/lib/aia/fzf.rb +8 -7
- data/lib/aia/input_collector.rb +24 -0
- data/lib/aia/prompt_handler.rb +36 -8
- data/lib/aia/prompt_pipeline.rb +183 -0
- data/lib/aia/session.rb +22 -372
- data/lib/aia/skill_utils.rb +61 -0
- data/lib/aia/ui_presenter.rb +8 -0
- data/lib/aia.rb +4 -0
- metadata +19 -45
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
# lib/aia/chat_loop.rb
|
|
2
|
+
# frozen_string_literal: true
|
|
3
|
+
|
|
4
|
+
require "reline"
|
|
5
|
+
require "pm"
|
|
6
|
+
|
|
7
|
+
module AIA
|
|
8
|
+
class ChatLoop
|
|
9
|
+
include AIA::SkillUtils
|
|
10
|
+
|
|
11
|
+
# Wires the chat loop to its three collaborators: the processor that
# sends prompts to the model, the presenter that renders terminal
# output, and the processor that executes slash directives.
def initialize(chat_processor, ui_presenter, directive_processor)
  @chat_processor, @ui_presenter, @directive_processor =
    chat_processor, ui_presenter, directive_processor
end
|
|
16
|
+
|
|
17
|
+
# Start the interactive chat session
|
|
18
|
+
# Runs one full interactive chat session.
#
# Priming happens in order: role text, skill bodies, --context files,
# then any piped stdin; after that the read-eval-print loop takes over.
# The end-of-chat banner is always displayed, even if the loop exits
# via `exit` or an exception, courtesy of the ensure clause.
#
# @param skip_context_files [Boolean] when true, --context files are not
#   sent as initial context
def start(skip_context_files: false)
  setup_session
  process_role_context
  process_skill_context
  process_initial_context(skip_context_files)
  handle_piped_input
  run_loop
ensure
  @ui_presenter.display_chat_end
end
|
|
28
|
+
|
|
29
|
+
private
|
|
30
|
+
|
|
31
|
+
# One-time terminal preparation before the loop starts: banner output,
# SIGINT handling, and a cleared Reline history so input recall from an
# earlier session does not bleed into this one.
def setup_session
  initialize_ui
  setup_signals
  Reline::HISTORY.clear
end
|
|
36
|
+
|
|
37
|
+
# Announces chat mode on stdout, then delegates the header rendering
# to the UI presenter.
def initialize_ui
  puts "\nEntering interactive chat mode..."
  @ui_presenter.display_chat_header
end
|
|
41
|
+
|
|
42
|
+
# Installs a Ctrl-C handler that exits the process immediately instead
# of letting Interrupt propagate into the read loop.
def setup_signals
  Signal.trap("INT") { exit }
end
|
|
45
|
+
|
|
46
|
+
# Injects the configured role as a system message into every active chat.
#
# The role id is resolved through PromptHandler#fetch_role; if no role is
# configured, resolution fails, the text is blank, or the client does not
# expose per-model chats, this is a silent no-op. Chats that already carry
# a system message are left untouched, so an existing system prompt wins
# over the role.
def process_role_context
  role_id = AIA.config.prompts.role
  return if role_id.nil? || role_id.empty?

  parsed = AIA::PromptHandler.new.fetch_role(role_id)
  return if parsed.nil?

  text = parsed.to_s
  return if text.nil? || text.strip.empty?
  return unless AIA.client.respond_to?(:chats)

  system_msg = RubyLLM::Message.new(role: :system, content: text)

  AIA.client.chats.each_value do |chat|
    already_primed = chat.messages.any? { |m| m.role == :system }
    chat.add_message(system_msg) unless already_primed
  end
end
|
|
66
|
+
|
|
67
|
+
# Sends the configured skills' bodies to the model as priming context,
# then prints the model's acknowledgement.
#
# Each requested skill name is resolved via SkillUtils#find_skill_dir
# against the configured skills directory. A resolved path may be a file
# (read directly) or a directory (its SKILL.md is read). Blank names,
# unresolvable names, and directories without a SKILL.md are skipped
# silently via filter_map.
def process_skill_context
  requested = AIA.config.prompts.skills
  return if requested.nil? || requested.empty?

  base_dir = AIA.config.skills.dir

  bodies = Array(requested).filter_map do |raw_name|
    name = raw_name.to_s.strip
    next if name.empty?

    path = find_skill_dir(name, base_dir)
    next unless path

    if File.file?(path)
      skill_body(File.read(path))
    else
      manifest = File.join(path, 'SKILL.md')
      skill_body(File.read(manifest)) if File.exist?(manifest)
    end
  end

  return if bodies.empty?

  reply = @chat_processor.process_prompt(bodies.join("\n\n"))
  text  = reply.is_a?(Hash) ? reply[:content] : reply
  @chat_processor.output_response(text)
  @ui_presenter.display_separator
end
|
|
95
|
+
|
|
96
|
+
# Seeds the conversation with the contents of any --context files.
#
# Files are read best-effort: an unreadable file contributes an inline
# error note instead of aborting the session. The combined text is sent
# as the first prompt and the reply is printed, spoken, and separated.
#
# @param skip_context_files [Boolean] when true, context files are ignored
def process_initial_context(skip_context_files)
  files = AIA.config.context_files
  return if skip_context_files || !files || files.empty?

  context = files.map do |file|
    begin
      File.read(file)
    rescue StandardError
      # Deliberate best-effort: surface the failure in the prompt text
      # rather than crashing before the session starts.
      "Error reading file: #{file}"
    end
  end.join("\n\n")

  return if context.empty?

  response_data = @chat_processor.process_prompt(context)
  content = response_data.is_a?(Hash) ? response_data[:content] : response_data

  @chat_processor.output_response(content)
  # NOTE(review): speak is unconditional here but guarded by AIA.speak?
  # in handle_piped_input — confirm which behavior is intended.
  @chat_processor.speak(content)
  @ui_presenter.display_separator
end
|
|
112
|
+
|
|
113
|
+
# When stdin is a pipe, consumes the piped text as the first prompt and
# re-points STDIN at the controlling terminal so the interactive loop
# can read from the keyboard afterwards.
#
# No-op when stdin is already a TTY or /dev/tty is unavailable. TTY
# failures degrade to a warning on stderr instead of aborting the session.
def handle_piped_input
  return if STDIN.tty?
  return unless File.exist?("/dev/tty") && File.readable?("/dev/tty") && File.writable?("/dev/tty")

  begin
    original_stdin = STDIN.dup
    piped_input = STDIN.read.strip
    STDIN.reopen("/dev/tty")

    # NOTE(review): on this early return STDIN stays pointed at /dev/tty
    # and the dup'd descriptor is never closed — confirm intentional.
    return if piped_input.empty?

    processed_input = PM.parse_string(piped_input).to_s

    response_data = @chat_processor.process_prompt(processed_input)
    content = response_data.is_a?(Hash) ? response_data[:content] : response_data

    @chat_processor.output_response(content)
    @chat_processor.speak(content) if AIA.speak?
    @ui_presenter.display_separator

    # NOTE(review): this restores the (already-drained) pipe as STDIN just
    # before run_loop — verify Reline reads from the tty rather than STDIN,
    # otherwise the interactive loop would see immediate EOF.
    STDIN.reopen(original_stdin)
  rescue Errno::ENXIO => e
    warn "Warning: Unable to handle piped input due to TTY unavailability: #{e.message}"
    return
  rescue StandardError => e
    warn "Warning: Error handling piped input: #{e.message}"
    return
  end
end
|
|
142
|
+
|
|
143
|
+
# The interactive read-eval-print loop.
#
# Each turn: read a line via the presenter; stop on EOF, blank input, or
# "exit"; mirror the user's input into the output file when one is
# configured; run slash directives (which may replace or cancel the
# prompt); expand the text through PM; send it to the model; then render
# the reply plus token metrics when --tokens is on.
def run_loop
  loop do
    follow_up_prompt = @ui_presenter.ask_question

    done = follow_up_prompt.nil? ||
           follow_up_prompt.strip.downcase == "exit" ||
           follow_up_prompt.strip.empty?
    break if done

    if AIA.config.output.file
      File.open(AIA.config.output.file, "a") do |file|
        file.puts "\nYou: #{follow_up_prompt}"
      end
    end

    if @directive_processor.directive?(follow_up_prompt)
      follow_up_prompt = process_directive(follow_up_prompt)
      # nil means the directive was session-only; skip the model call.
      next if follow_up_prompt.nil?
    end

    begin
      processed_prompt = PM.parse_string(follow_up_prompt).to_s
    rescue StandardError => e
      @ui_presenter.display_info("Error: #{e.class}: #{e.message}")
      next
    end

    response_data = @chat_processor.process_prompt(processed_prompt)

    # Newer processors return a Hash with metrics; older ones a bare string.
    if response_data.is_a?(Hash)
      content       = response_data[:content]
      metrics       = response_data[:metrics]
      multi_metrics = response_data[:multi_metrics]
    else
      content       = response_data
      metrics       = nil
      multi_metrics = nil
    end

    @ui_presenter.display_ai_response(content)

    if AIA.config.flags.tokens
      if multi_metrics
        @ui_presenter.display_multi_model_metrics(multi_metrics)
      elsif metrics
        @ui_presenter.display_token_metrics(metrics)
      end
    end

    @chat_processor.speak(content)
    @ui_presenter.display_separator
  end
end
|
|
193
|
+
|
|
194
|
+
# Runs a slash directive and decides what, if anything, flows to the model.
#
# Context-management directives (/clear, /checkpoint, /restore, /review,
# /context) act on the session itself: their output is shown to the user
# and nil is returned so the loop skips the model call. Any other
# directive that produced output is wrapped into a continuation prompt
# via handle_successful_directive.
#
# @return [String, nil] replacement prompt text, or nil to skip this turn
def process_directive(follow_up_prompt)
  directive_output = @directive_processor.process(follow_up_prompt, nil)

  session_only = follow_up_prompt.strip.start_with?(
    "/clear", "/checkpoint", "/restore", "/review", "/context"
  )
  if session_only
    @ui_presenter.display_info(directive_output) unless directive_output.nil? || directive_output.strip.empty?
    return nil
  end

  return nil if directive_output.nil? || directive_output.strip.empty?

  handle_successful_directive(follow_up_prompt, directive_output)
end
|
|
206
|
+
|
|
207
|
+
# Prints the directive's output locally, then returns a prompt telling
# the model what was executed so the conversation continues with that
# context. The returned string becomes the next prompt sent to the LLM.
def handle_successful_directive(follow_up_prompt, directive_output)
  puts "\n#{directive_output}\n"
  ["I executed this directive: #{follow_up_prompt}",
   "Here's the output: #{directive_output}",
   "Let's continue our conversation."].join("\n")
end
|
|
211
|
+
|
|
212
|
+
# Parse multi-model response into per-model responses (ADR-002 revised + ADR-005)
#
# The combined text contains one section per model, each introduced by a
# "from: <display name>" header line. The display name may carry a
# provider suffix in parentheses (stripped) and an instance counter such
# as " #2" (collapsed to "#2") to recover the internal model id. Lines
# before the first header are ignored.
#
# Uses a local MatchData with a named capture instead of `=~` plus the
# global `$1`, which is process-global state and fragile if any nested
# call performs its own regexp match.
#
# @param combined_response [String, nil] concatenated multi-model output
# @return [Hash{String => String}] internal model id => stripped section text
def parse_multi_model_response(combined_response)
  return {} if combined_response.nil? || combined_response.empty?

  header_re = /^from:\s+(?<name>.+)$/
  responses = {}
  current_model = nil
  current_content = []

  # Commit the section collected so far (shared by the loop and the tail).
  flush = lambda do
    responses[current_model] = current_content.join.strip if current_model
  end

  combined_response.each_line do |line|
    if (match = header_re.match(line))
      flush.call

      display_name = match[:name].strip
      # Drop a trailing "(provider)" suffix, then normalize " #N" -> "#N".
      current_model = display_name.sub(/\s+\([^)]+\)\s*$/, '').sub(/\s+#/, '#')
      current_content = []
    elsif current_model
      current_content << line
    end
  end

  flush.call
  responses
end
|
|
243
|
+
end
|
|
244
|
+
end
|
|
@@ -15,9 +15,9 @@ module AIA
|
|
|
15
15
|
@speaker ||= AiClient.new(AIA.config.audio.speech_model) if AIA.config.audio.speech_model
|
|
16
16
|
|
|
17
17
|
if @speaker
|
|
18
|
-
|
|
18
|
+
system(AIA.config.audio.speak_command, @speaker.speak(text).path)
|
|
19
19
|
else
|
|
20
|
-
|
|
20
|
+
warn "Warning: Unable to speak. Speech model not configured properly."
|
|
21
21
|
end
|
|
22
22
|
end
|
|
23
23
|
|
|
@@ -98,7 +98,10 @@ module AIA
|
|
|
98
98
|
first_model = models.first
|
|
99
99
|
model_name = first_model.respond_to?(:name) ? first_model.name : first_model.to_s
|
|
100
100
|
|
|
101
|
-
|
|
101
|
+
# client_model is the full resolved ID (e.g. "claude-sonnet-4-20250514"),
|
|
102
|
+
# model_name is the configured alias (e.g. "claude-sonnet-4").
|
|
103
|
+
# The alias is always a prefix/substring of the resolved ID, so check that way.
|
|
104
|
+
unless client_model.downcase.include?(model_name.downcase)
|
|
102
105
|
AIA.client = AIA.client.class.new
|
|
103
106
|
end
|
|
104
107
|
end
|
|
@@ -6,7 +6,9 @@
|
|
|
6
6
|
# for the Config class.
|
|
7
7
|
|
|
8
8
|
require 'optparse'
|
|
9
|
+
require 'yaml'
|
|
9
10
|
require_relative 'model_spec'
|
|
11
|
+
require_relative '../skill_utils'
|
|
10
12
|
|
|
11
13
|
module AIA
|
|
12
14
|
module CLIParser
|
|
@@ -21,8 +23,8 @@ module AIA
|
|
|
21
23
|
parser = create_option_parser(options)
|
|
22
24
|
parser.parse!
|
|
23
25
|
rescue OptionParser::InvalidOption, OptionParser::MissingArgument => e
|
|
24
|
-
|
|
25
|
-
|
|
26
|
+
warn "ERROR: #{e.message}"
|
|
27
|
+
warn " use --help for usage report"
|
|
26
28
|
exit 1
|
|
27
29
|
end
|
|
28
30
|
|
|
@@ -62,7 +64,7 @@ module AIA
|
|
|
62
64
|
|
|
63
65
|
opts.on("-f", "--fuzzy", "Use fuzzy matching for prompt search") do
|
|
64
66
|
unless system("which fzf > /dev/null 2>&1")
|
|
65
|
-
|
|
67
|
+
warn "Error: 'fzf' is not installed. Please install 'fzf' to use the --fuzzy option."
|
|
66
68
|
exit 1
|
|
67
69
|
end
|
|
68
70
|
options[:fuzzy] = true
|
|
@@ -166,6 +168,23 @@ module AIA
|
|
|
166
168
|
warn "Warning: --regex is deprecated. PM v1.0.0 uses ERB parameters (<%= param %>)."
|
|
167
169
|
options[:parameter_regex] = pattern
|
|
168
170
|
end
|
|
171
|
+
|
|
172
|
+
opts.on("--skills-dir DIR", "Set directory containing skill subdirectories") do |dir|
|
|
173
|
+
options[:skills_dir] = dir
|
|
174
|
+
end
|
|
175
|
+
|
|
176
|
+
opts.on("--skills-prefix PREFIX", "Set subdirectory name for skill files (default: skills)") do |prefix|
|
|
177
|
+
options[:skills_prefix] = prefix
|
|
178
|
+
end
|
|
179
|
+
|
|
180
|
+
opts.on("-s", "--skill SKILL_IDS", "Prepend skill(s) to prompt (comma-separated IDs or paths)") do |ids|
|
|
181
|
+
options[:skills] ||= []
|
|
182
|
+
options[:skills] += ids.split(',').map(&:strip)
|
|
183
|
+
end
|
|
184
|
+
|
|
185
|
+
opts.on("--list-skills", "List available skills and exit") do
|
|
186
|
+
options[:list_skills] = true
|
|
187
|
+
end
|
|
169
188
|
end
|
|
170
189
|
|
|
171
190
|
def setup_ai_parameters(opts, options)
|
|
@@ -249,7 +268,7 @@ module AIA
|
|
|
249
268
|
opts.on("--log-level LEVEL", "Set log level (debug|info|warn|error|fatal)") do |level|
|
|
250
269
|
level = level.downcase
|
|
251
270
|
unless %w[debug info warn error fatal].include?(level)
|
|
252
|
-
|
|
271
|
+
warn "ERROR: Invalid log level '#{level}'. Must be one of: debug, info, warn, error, fatal"
|
|
253
272
|
exit 1
|
|
254
273
|
end
|
|
255
274
|
options[:log_level_override] = level
|
|
@@ -410,6 +429,13 @@ module AIA
|
|
|
410
429
|
end
|
|
411
430
|
|
|
412
431
|
def validate_role_exists(role_id)
|
|
432
|
+
if AIA::SkillUtils.path_based_id?(role_id)
|
|
433
|
+
expanded = File.expand_path(role_id)
|
|
434
|
+
expanded += '.md' if File.extname(expanded).empty?
|
|
435
|
+
raise ArgumentError, "Role file not found: #{expanded}" unless File.exist?(expanded)
|
|
436
|
+
return
|
|
437
|
+
end
|
|
438
|
+
|
|
413
439
|
prompts_dir = ENV.fetch('AIA_PROMPTS__DIR', File.join(ENV['HOME'], '.prompts'))
|
|
414
440
|
roles_prefix = ENV.fetch('AIA_PROMPTS__ROLES_PREFIX', 'roles')
|
|
415
441
|
|
|
@@ -442,19 +468,30 @@ module AIA
|
|
|
442
468
|
roles_prefix = ENV.fetch('AIA_PROMPTS__ROLES_PREFIX', 'roles')
|
|
443
469
|
roles_dir = File.join(prompts_dir, roles_prefix)
|
|
444
470
|
|
|
445
|
-
|
|
446
|
-
roles = list_available_role_names(prompts_dir, roles_prefix)
|
|
447
|
-
|
|
448
|
-
if roles.empty?
|
|
449
|
-
puts "No role files found in #{roles_dir}"
|
|
450
|
-
puts "Create .md files in this directory to define roles."
|
|
451
|
-
else
|
|
452
|
-
puts "Available roles in #{roles_dir}:"
|
|
453
|
-
roles.each { |role| puts " - #{role}" }
|
|
454
|
-
end
|
|
455
|
-
else
|
|
471
|
+
unless Dir.exist?(roles_dir)
|
|
456
472
|
puts "No roles directory found at #{roles_dir}"
|
|
457
473
|
puts "Create this directory and add role files to use roles."
|
|
474
|
+
return
|
|
475
|
+
end
|
|
476
|
+
|
|
477
|
+
roles = list_available_role_names(prompts_dir, roles_prefix)
|
|
478
|
+
|
|
479
|
+
if roles.empty?
|
|
480
|
+
puts "No role files found in #{roles_dir}"
|
|
481
|
+
puts "Create .md files in this directory to define roles."
|
|
482
|
+
return
|
|
483
|
+
end
|
|
484
|
+
|
|
485
|
+
roles.each do |role_id|
|
|
486
|
+
role_file = File.join(roles_dir, "#{role_id}.md")
|
|
487
|
+
fm = AIA::SkillUtils.parse_front_matter(role_file)
|
|
488
|
+
|
|
489
|
+
puts "## #{role_id}"
|
|
490
|
+
puts
|
|
491
|
+
puts "| Key | Value |"
|
|
492
|
+
puts "|-----|-------|"
|
|
493
|
+
fm.each { |key, value| puts "| #{key} | #{value} |" }
|
|
494
|
+
puts
|
|
458
495
|
end
|
|
459
496
|
end
|
|
460
497
|
|
|
@@ -464,6 +501,7 @@ module AIA
|
|
|
464
501
|
|
|
465
502
|
Dir.glob("**/*.md", base: roles_dir)
|
|
466
503
|
.map { |f| f.chomp('.md') }
|
|
504
|
+
.reject { |f| f.split('/').any? { |part| part.start_with?('_') } }
|
|
467
505
|
.sort
|
|
468
506
|
end
|
|
469
507
|
|
|
@@ -519,7 +557,7 @@ module AIA
|
|
|
519
557
|
paths = []
|
|
520
558
|
|
|
521
559
|
if path_list.empty?
|
|
522
|
-
|
|
560
|
+
warn "No list of paths for --tools option"
|
|
523
561
|
exit 1
|
|
524
562
|
end
|
|
525
563
|
|
|
@@ -529,7 +567,7 @@ module AIA
|
|
|
529
567
|
if '.rb' == File.extname(a_path)
|
|
530
568
|
paths << a_path
|
|
531
569
|
else
|
|
532
|
-
|
|
570
|
+
warn "file should have *.rb extension: #{a_path}"
|
|
533
571
|
exit 1
|
|
534
572
|
end
|
|
535
573
|
elsif File.directory?(a_path)
|
|
@@ -537,7 +575,7 @@ module AIA
|
|
|
537
575
|
paths += rb_files
|
|
538
576
|
end
|
|
539
577
|
else
|
|
540
|
-
|
|
578
|
+
warn "file/dir path is not valid: #{a_path}"
|
|
541
579
|
exit 1
|
|
542
580
|
end
|
|
543
581
|
end
|
data/lib/aia/config/defaults.yml
CHANGED
|
@@ -59,12 +59,26 @@ defaults:
|
|
|
59
59
|
prompts:
|
|
60
60
|
dir: ~/.prompts
|
|
61
61
|
extname: .md
|
|
62
|
-
|
|
63
|
-
roles_dir: ~/.prompts/roles
|
|
62
|
+
#
|
|
64
63
|
role: ~
|
|
64
|
+
roles_dir: ~/.prompts/roles
|
|
65
|
+
roles_prefix: roles
|
|
66
|
+
#
|
|
67
|
+
skills: []
|
|
68
|
+
skills_prefix: skills
|
|
69
|
+
#
|
|
70
|
+
tool: ~
|
|
71
|
+
tools_prefix: tools
|
|
72
|
+
#
|
|
65
73
|
system_prompt: ~
|
|
66
74
|
parameter_regex: ~
|
|
67
75
|
|
|
76
|
+
roles:
|
|
77
|
+
dir: ~/.prompts/roles
|
|
78
|
+
|
|
79
|
+
skills:
|
|
80
|
+
dir: ~/.prompts/skills
|
|
81
|
+
|
|
68
82
|
# ---------------------------------------------------------------------------
|
|
69
83
|
# Output Configuration
|
|
70
84
|
# Access: AIA.config.output.file, AIA.config.output.append, etc.
|
|
@@ -112,6 +126,7 @@ defaults:
|
|
|
112
126
|
# Env: AIA_TOOLS__PATHS, AIA_TOOLS__ALLOWED, etc.
|
|
113
127
|
# ---------------------------------------------------------------------------
|
|
114
128
|
tools:
|
|
129
|
+
dir: ~/.prompts/tools
|
|
115
130
|
paths: []
|
|
116
131
|
allowed: ~
|
|
117
132
|
rejected: ~
|
data/lib/aia/config/validator.rb
CHANGED
|
@@ -2,6 +2,7 @@
|
|
|
2
2
|
|
|
3
3
|
require 'word_wrapper'
|
|
4
4
|
require_relative '../adapter/gem_activator'
|
|
5
|
+
require_relative '../skill_utils'
|
|
5
6
|
|
|
6
7
|
# lib/aia/config/validator.rb
|
|
7
8
|
#
|
|
@@ -31,6 +32,7 @@ module AIA
|
|
|
31
32
|
handle_dump_config(config)
|
|
32
33
|
handle_mcp_list(config)
|
|
33
34
|
handle_list_tools(config)
|
|
35
|
+
handle_list_skills(config)
|
|
34
36
|
handle_completion_script(config)
|
|
35
37
|
validate_required_prompt_id(config)
|
|
36
38
|
process_role_configuration(config)
|
|
@@ -75,7 +77,7 @@ module AIA
|
|
|
75
77
|
|
|
76
78
|
bad_files = remaining_args.reject { |filename| AIA.good_file?(filename) }
|
|
77
79
|
if bad_files.any?
|
|
78
|
-
|
|
80
|
+
warn "Error: The following files do not exist: #{bad_files.join(', ')}"
|
|
79
81
|
exit 1
|
|
80
82
|
end
|
|
81
83
|
|
|
@@ -119,7 +121,7 @@ module AIA
|
|
|
119
121
|
def validate_required_prompt_id(config)
|
|
120
122
|
return unless config.prompt_id.nil? && !(config.flags.chat == true) && !(config.flags.fuzzy == true)
|
|
121
123
|
|
|
122
|
-
|
|
124
|
+
warn "Error: A prompt ID is required unless using --chat, --fuzzy, or providing context files. Use -h or --help for help."
|
|
123
125
|
exit 1
|
|
124
126
|
end
|
|
125
127
|
|
|
@@ -128,14 +130,18 @@ module AIA
|
|
|
128
130
|
return if role.nil? || role.empty?
|
|
129
131
|
|
|
130
132
|
roles_prefix = config.prompts.roles_prefix
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
end
|
|
133
|
+
|
|
134
|
+
unless AIA::SkillUtils.path_based_id?(role) || roles_prefix.nil? || roles_prefix.empty? || role.start_with?(roles_prefix)
|
|
135
|
+
config.prompts.role = "#{roles_prefix}/#{role}"
|
|
136
|
+
role = config.prompts.role
|
|
136
137
|
end
|
|
137
138
|
|
|
138
|
-
config.prompts.roles_dir ||= File.join(config.prompts.dir, roles_prefix)
|
|
139
|
+
config.prompts.roles_dir ||= File.join(config.prompts.dir, roles_prefix.to_s)
|
|
140
|
+
|
|
141
|
+
# In chat-only mode (no prompt_id), leave the role configured so ChatLoop
|
|
142
|
+
# can inject it as initial context. Promoting it to prompt_id would cause
|
|
143
|
+
# PM to receive the role path as a literal string rather than file content.
|
|
144
|
+
return if config.flags&.chat == true
|
|
139
145
|
|
|
140
146
|
if config.prompt_id.nil? || config.prompt_id.empty?
|
|
141
147
|
unless role.nil? || role.empty?
|
|
@@ -230,6 +236,40 @@ module AIA
|
|
|
230
236
|
exit 0
|
|
231
237
|
end
|
|
232
238
|
|
|
239
|
+
# Handles --list-skills: prints a markdown table of the front matter for
# every skill, then exits the process.
#
# A skill is any subdirectory of the configured skills dir that contains
# a SKILL.md manifest. A missing directory or an empty listing is
# reported on stderr; every path exits with status 0. No-op (returns)
# when --list-skills was not requested.
def handle_list_skills(config)
  return unless config.list_skills

  skills_dir = AIA.config.skills.dir

  unless Dir.exist?(skills_dir)
    $stderr.puts "No skills directory found at #{skills_dir}"
    exit 0
  end

  skill_names = Dir.glob("*/SKILL.md", base: skills_dir).map { |f| File.dirname(f) }.sort

  if skill_names.empty?
    $stderr.puts "No skills found in #{skills_dir}"
    exit 0
  end

  skill_names.each do |skill_name|
    manifest = File.join(skills_dir, skill_name, 'SKILL.md')
    front_matter = AIA::SkillUtils.parse_front_matter(manifest)

    puts "## #{skill_name}"
    puts
    puts "| Key | Value |"
    puts "|-----|-------|"
    front_matter.each { |key, value| puts "| #{key} | #{value} |" }
    puts
  end

  exit 0
end
|
|
272
|
+
|
|
233
273
|
def list_tools_terminal(local_tools, mcp_tool_groups)
|
|
234
274
|
width = (ENV['COLUMNS'] || 80).to_i - 4
|
|
235
275
|
indent = ' '
|
|
@@ -449,7 +489,7 @@ module AIA
|
|
|
449
489
|
if File.exist?(script_path)
|
|
450
490
|
puts File.read(script_path)
|
|
451
491
|
else
|
|
452
|
-
|
|
492
|
+
warn "ERROR: The shell '#{shell}' is not supported or the completion script is missing."
|
|
453
493
|
end
|
|
454
494
|
end
|
|
455
495
|
|
|
@@ -457,7 +497,7 @@ module AIA
|
|
|
457
497
|
chat_mode = config.flags.chat == true
|
|
458
498
|
fuzzy_mode = config.flags.fuzzy == true
|
|
459
499
|
if !chat_mode && !fuzzy_mode && (config.prompt_id.nil? || config.prompt_id.empty?) && (config.context_files.nil? || config.context_files.empty?)
|
|
460
|
-
|
|
500
|
+
warn "Error: A prompt ID is required unless using --chat, --fuzzy, or providing context files. Use -h or --help for help."
|
|
461
501
|
exit 1
|
|
462
502
|
end
|
|
463
503
|
end
|
|
@@ -486,7 +526,7 @@ module AIA
|
|
|
486
526
|
|
|
487
527
|
prompt_file_path = File.join(config.prompts.dir, "#{prompt_id}#{config.prompts.extname}")
|
|
488
528
|
unless File.exist?(prompt_file_path)
|
|
489
|
-
|
|
529
|
+
warn "Error: Prompt ID '#{prompt_id}' does not exist at #{prompt_file_path}"
|
|
490
530
|
and_exit = true
|
|
491
531
|
end
|
|
492
532
|
end
|
|
@@ -518,6 +558,7 @@ module AIA
|
|
|
518
558
|
File.write(file, content)
|
|
519
559
|
puts "Config successfully dumped to #{file}"
|
|
520
560
|
end
|
|
561
|
+
|
|
521
562
|
end
|
|
522
563
|
end
|
|
523
564
|
end
|
data/lib/aia/config.rb
CHANGED
|
@@ -48,7 +48,7 @@ module AIA
|
|
|
48
48
|
# ==========================================================================
|
|
49
49
|
|
|
50
50
|
# Nested section attributes (defined as hashes, converted to ConfigSection)
|
|
51
|
-
attr_config :service, :llm, :prompts, :output, :audio, :image, :embedding,
|
|
51
|
+
attr_config :service, :llm, :prompts, :roles, :skills, :output, :audio, :image, :embedding,
|
|
52
52
|
:tools, :flags, :registry, :paths, :logger
|
|
53
53
|
|
|
54
54
|
# Array/collection attributes
|
|
@@ -56,7 +56,7 @@ module AIA
|
|
|
56
56
|
|
|
57
57
|
# Runtime attributes (not loaded from config files)
|
|
58
58
|
attr_accessor :prompt_id, :stdin_content, :remaining_args, :dump_file,
|
|
59
|
-
:completion, :mcp_list, :list_tools,
|
|
59
|
+
:completion, :mcp_list, :list_tools, :list_skills,
|
|
60
60
|
:executable_prompt_content,
|
|
61
61
|
:tool_names, :loaded_tools,
|
|
62
62
|
:log_level_override, :log_file_override,
|
|
@@ -105,6 +105,8 @@ module AIA
|
|
|
105
105
|
service: config_section_coercion(:service),
|
|
106
106
|
llm: config_section_coercion(:llm),
|
|
107
107
|
prompts: config_section_coercion(:prompts),
|
|
108
|
+
roles: config_section_coercion(:roles),
|
|
109
|
+
skills: config_section_coercion(:skills),
|
|
108
110
|
output: config_section_coercion(:output),
|
|
109
111
|
audio: config_section_coercion(:audio),
|
|
110
112
|
image: config_section_coercion(:image),
|
|
@@ -169,6 +171,9 @@ module AIA
|
|
|
169
171
|
prompts_dir: [:prompts, :dir],
|
|
170
172
|
roles_prefix: [:prompts, :roles_prefix],
|
|
171
173
|
role: [:prompts, :role],
|
|
174
|
+
skills_dir: [:skills, :dir],
|
|
175
|
+
skills_prefix: [:prompts, :skills_prefix],
|
|
176
|
+
skills: [:prompts, :skills],
|
|
172
177
|
parameter_regex: [:prompts, :parameter_regex],
|
|
173
178
|
system_prompt: [:prompts, :system_prompt],
|
|
174
179
|
# output section
|
|
@@ -235,6 +240,8 @@ module AIA
|
|
|
235
240
|
llm: llm.to_h,
|
|
236
241
|
models: models.map(&:to_h),
|
|
237
242
|
prompts: prompts.to_h,
|
|
243
|
+
roles: roles.to_h,
|
|
244
|
+
skills: skills.to_h,
|
|
238
245
|
output: output.to_h,
|
|
239
246
|
audio: audio.to_h,
|
|
240
247
|
image: image.to_h,
|
|
@@ -296,7 +303,7 @@ module AIA
|
|
|
296
303
|
send("#{key}=", Array(value)) if respond_to?("#{key}=")
|
|
297
304
|
when :mcp_servers
|
|
298
305
|
self.mcp_servers = Array(value)
|
|
299
|
-
when :service, :llm, :prompts, :output, :audio, :image, :embedding,
|
|
306
|
+
when :service, :llm, :prompts, :roles, :skills, :output, :audio, :image, :embedding,
|
|
300
307
|
:tools, :flags, :registry, :paths, :logger
|
|
301
308
|
section = send(key)
|
|
302
309
|
if section.is_a?(MywayConfig::ConfigSection) && value.is_a?(Hash)
|
|
@@ -388,6 +395,18 @@ module AIA
|
|
|
388
395
|
if output.history_file
|
|
389
396
|
output.history_file = File.expand_path(output.history_file)
|
|
390
397
|
end
|
|
398
|
+
|
|
399
|
+
if roles.dir
|
|
400
|
+
roles.dir = File.expand_path(roles.dir)
|
|
401
|
+
end
|
|
402
|
+
|
|
403
|
+
if skills.dir
|
|
404
|
+
skills.dir = File.expand_path(skills.dir)
|
|
405
|
+
end
|
|
406
|
+
|
|
407
|
+
if tools.dir
|
|
408
|
+
tools.dir = File.expand_path(tools.dir)
|
|
409
|
+
end
|
|
391
410
|
end
|
|
392
411
|
|
|
393
412
|
def ensure_arrays
|
|
@@ -401,6 +420,9 @@ module AIA
|
|
|
401
420
|
|
|
402
421
|
# Ensure tools.paths is an array
|
|
403
422
|
tools.paths = [] if tools.paths.nil?
|
|
423
|
+
|
|
424
|
+
# Ensure prompts.skills is an array
|
|
425
|
+
prompts.skills = [] if prompts.respond_to?(:skills) && prompts.skills.nil?
|
|
404
426
|
end
|
|
405
427
|
|
|
406
428
|
# Process MCP JSON files and merge servers into mcp_servers
|
|
@@ -441,6 +463,10 @@ module AIA
|
|
|
441
463
|
registry.send("#{key}=", value) if registry.respond_to?("#{key}=")
|
|
442
464
|
when :paths
|
|
443
465
|
paths.send("#{key}=", value) if paths.respond_to?("#{key}=")
|
|
466
|
+
when :roles
|
|
467
|
+
roles.send("#{key}=", value) if roles.respond_to?("#{key}=")
|
|
468
|
+
when :skills
|
|
469
|
+
skills.send("#{key}=", value) if skills.respond_to?("#{key}=")
|
|
444
470
|
end
|
|
445
471
|
end
|
|
446
472
|
end
|