aia 0.9.24 → 0.10.2

This diff shows the changes between publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (62)
  1. checksums.yaml +4 -4
  2. data/.version +1 -1
  3. data/CHANGELOG.md +84 -3
  4. data/README.md +179 -59
  5. data/bin/aia +6 -0
  6. data/docs/cli-reference.md +145 -72
  7. data/docs/configuration.md +156 -19
  8. data/docs/examples/tools/index.md +2 -2
  9. data/docs/faq.md +11 -11
  10. data/docs/guides/available-models.md +11 -11
  11. data/docs/guides/basic-usage.md +18 -17
  12. data/docs/guides/chat.md +57 -11
  13. data/docs/guides/executable-prompts.md +15 -15
  14. data/docs/guides/first-prompt.md +2 -2
  15. data/docs/guides/getting-started.md +6 -6
  16. data/docs/guides/image-generation.md +24 -24
  17. data/docs/guides/local-models.md +2 -2
  18. data/docs/guides/models.md +96 -18
  19. data/docs/guides/tools.md +4 -4
  20. data/docs/installation.md +2 -2
  21. data/docs/prompt_management.md +11 -11
  22. data/docs/security.md +3 -3
  23. data/docs/workflows-and-pipelines.md +1 -1
  24. data/examples/README.md +6 -6
  25. data/examples/headlines +3 -3
  26. data/lib/aia/aia_completion.bash +2 -2
  27. data/lib/aia/aia_completion.fish +4 -4
  28. data/lib/aia/aia_completion.zsh +2 -2
  29. data/lib/aia/chat_processor_service.rb +31 -21
  30. data/lib/aia/config/cli_parser.rb +403 -403
  31. data/lib/aia/config/config_section.rb +87 -0
  32. data/lib/aia/config/defaults.yml +219 -0
  33. data/lib/aia/config/defaults_loader.rb +147 -0
  34. data/lib/aia/config/mcp_parser.rb +151 -0
  35. data/lib/aia/config/model_spec.rb +67 -0
  36. data/lib/aia/config/validator.rb +185 -136
  37. data/lib/aia/config.rb +336 -17
  38. data/lib/aia/directive_processor.rb +14 -6
  39. data/lib/aia/directives/configuration.rb +24 -10
  40. data/lib/aia/directives/models.rb +3 -4
  41. data/lib/aia/directives/utility.rb +3 -2
  42. data/lib/aia/directives/web_and_file.rb +50 -47
  43. data/lib/aia/logger.rb +328 -0
  44. data/lib/aia/prompt_handler.rb +18 -22
  45. data/lib/aia/ruby_llm_adapter.rb +572 -69
  46. data/lib/aia/session.rb +9 -8
  47. data/lib/aia/ui_presenter.rb +20 -16
  48. data/lib/aia/utility.rb +50 -18
  49. data/lib/aia.rb +91 -66
  50. data/lib/extensions/ruby_llm/modalities.rb +2 -0
  51. data/mcp_servers/apple-mcp.json +8 -0
  52. data/mcp_servers/mcp_server_chart.json +11 -0
  53. data/mcp_servers/playwright_one.json +8 -0
  54. data/mcp_servers/playwright_two.json +8 -0
  55. data/mcp_servers/tavily_mcp_server.json +8 -0
  56. metadata +83 -25
  57. data/lib/aia/config/base.rb +0 -308
  58. data/lib/aia/config/defaults.rb +0 -91
  59. data/lib/aia/config/file_loader.rb +0 -163
  60. data/mcp_servers/imcp.json +0 -7
  61. data/mcp_servers/launcher.json +0 -11
  62. data/mcp_servers/timeserver.json +0 -8
data/docs/cli-reference.md
@@ -48,6 +48,24 @@ aia --terse my_prompt
48
48
  aia --terse --chat
49
49
  ```
50
50
 
51
+ ### `--tokens`
52
+ Display token usage information after each response in chat mode. Shows input tokens, output tokens, and model ID.
53
+
54
+ ```bash
55
+ aia --chat --tokens
56
+ aia --chat --tokens --model gpt-4
57
+ ```
58
+
59
+ ### `--cost`
60
+ Include cost calculations with token usage. Automatically enables `--tokens`. Shows estimated cost based on the model's pricing.
61
+
62
+ ```bash
63
+ aia --chat --cost
64
+ aia --chat --cost --model gpt-4,claude-3-sonnet
65
+ ```
66
+
67
+ **Note**: `--cost` implies `--tokens`, so you don't need to specify both.
68
+
51
69
  ## Adapter Options
52
70
 
53
71
  ### `--adapter ADAPTER`
@@ -59,24 +77,24 @@ aia --adapter ruby_llm
59
77
 
60
78
  **Valid adapters**: `ruby_llm`
61
79
 
62
- ### `--available_models [QUERY]`
80
+ ### `--available-models [QUERY]`
63
81
  List available models matching the optional query, then exit. The query is a comma-separated list of terms combined with AND.
64
82
 
65
83
  ```bash
66
84
  # List all models
67
- aia --available_models
85
+ aia --available-models
68
86
 
69
87
  # Filter by provider
70
- aia --available_models openai
88
+ aia --available-models openai
71
89
 
72
90
  # Filter by capability and provider
73
- aia --available_models openai,mini
91
+ aia --available-models openai,mini
74
92
 
75
93
  # Filter by modality
76
- aia --available_models text_to_text
94
+ aia --available-models text_to_text
77
95
 
78
96
  # Complex filter
79
- aia --available_models openai,gpt,text_to_image
97
+ aia --available-models openai,gpt,text_to_image
80
98
  ```
81
99
 
82
100
  ## Model Options
@@ -122,47 +140,47 @@ aia --model "gpt-4,claude-3-sonnet" --consensus my_prompt
122
140
  aia --model "gpt-4,claude-3-sonnet" --no-consensus my_prompt
123
141
  ```
124
142
 
125
- ### `--sm, --speech_model MODEL`
143
+ ### `--sm, --speech-model MODEL`
126
144
  Speech model to use for text-to-speech functionality.
127
145
 
128
146
  ```bash
129
- aia --speech_model tts-1 --speak my_prompt
147
+ aia --speech-model tts-1 --speak my_prompt
130
148
  aia --sm tts-1-hd --speak my_prompt
131
149
  ```
132
150
 
133
- ### `--tm, --transcription_model MODEL`
151
+ ### `--tm, --transcription-model MODEL`
134
152
  Transcription model to use for speech-to-text functionality.
135
153
 
136
154
  ```bash
137
- aia --transcription_model whisper-1 audio_file.wav
155
+ aia --transcription-model whisper-1 audio_file.wav
138
156
  aia --tm whisper-1 my_audio.mp3
139
157
  ```
140
158
 
141
159
  ## File Options
142
160
 
143
- ### `-c, --config_file FILE`
161
+ ### `-c, --config-file FILE`
144
162
  Load configuration from a specific file.
145
163
 
146
164
  ```bash
147
- aia --config_file /path/to/config.yml my_prompt
165
+ aia --config-file /path/to/config.yml my_prompt
148
166
  aia -c ~/.aia/custom_config.yml my_prompt
149
167
  ```
150
168
 
151
- ### `-o, --[no-]out_file [FILE]`
169
+ ### `-o, --[no-]output [FILE]`
152
170
  Output file for saving AI responses.
153
171
 
154
172
  ```bash
155
173
  # Save to default file (temp.md)
156
- aia --out_file my_prompt
174
+ aia --output my_prompt
157
175
 
158
176
  # Save to specific file
159
- aia --out_file output.txt my_prompt
177
+ aia --output output.txt my_prompt
160
178
 
161
179
  # Use absolute path
162
- aia --out_file /tmp/ai_response.md my_prompt
180
+ aia --output /tmp/ai_response.md my_prompt
163
181
 
164
182
  # Disable file output
165
- aia --no-out_file my_prompt
183
+ aia --no-output my_prompt
166
184
  ```
167
185
 
168
186
  ### `-a, --[no-]append`
@@ -170,24 +188,24 @@ Append to output file instead of overwriting.
170
188
 
171
189
  ```bash
172
190
  # Append mode
173
- aia --out_file log.md --append my_prompt
191
+ aia --output log.md --append my_prompt
174
192
 
175
193
  # Overwrite mode (default)
176
- aia --out_file log.md --no-append my_prompt
194
+ aia --output log.md --no-append my_prompt
177
195
  ```
178
196
 
179
- ### `-l, --[no-]log_file [FILE]`
180
- Log file for AIA operations.
197
+ ### `--[no-]history-file [FILE]`
198
+ Conversation history file for logging prompts and responses.
181
199
 
182
200
  ```bash
183
- # Enable logging to default location
184
- aia --log_file my_prompt
201
+ # Enable history logging to default location
202
+ aia --history-file my_prompt
185
203
 
186
204
  # Log to specific file
187
- aia --log_file /var/log/aia.log my_prompt
205
+ aia --history-file /var/log/aia_history.log my_prompt
188
206
 
189
- # Disable logging
190
- aia --no-log_file my_prompt
207
+ # Disable history logging
208
+ aia --no-history-file my_prompt
191
209
  ```
192
210
 
193
211
  ### `--md, --[no-]markdown`
@@ -203,20 +221,20 @@ aia --no-markdown my_prompt
203
221
 
204
222
  ## Prompt Options
205
223
 
206
- ### `--prompts_dir DIR`
224
+ ### `--prompts-dir DIR`
207
225
  Directory containing prompt files.
208
226
 
209
227
  ```bash
210
- aia --prompts_dir /custom/prompts my_prompt
211
- aia --prompts_dir ~/work/prompts my_prompt
228
+ aia --prompts-dir /custom/prompts my_prompt
229
+ aia --prompts-dir ~/work/prompts my_prompt
212
230
  ```
213
231
 
214
- ### `--roles_prefix PREFIX`
232
+ ### `--roles-prefix PREFIX`
215
233
  Subdirectory name for role files (default: `roles`).
216
234
 
217
235
  ```bash
218
236
  # Use custom roles directory
219
- aia --roles_prefix personas --role expert
237
+ aia --roles-prefix personas --role expert
220
238
 
221
239
  # Results in looking for roles in ~/.prompts/personas/expert.txt
222
240
  ```
@@ -260,12 +278,12 @@ aia --list-roles
260
278
 
261
279
  Roles are discovered from:
262
280
  - **Default location**: `~/.prompts/roles/`
263
- - **Custom location**: Set via `--prompts_dir` and `--roles_prefix`
281
+ - **Custom location**: Set via `--prompts-dir` and `--roles-prefix`
264
282
  - **Nested directories**: Supports subdirectories like `roles/software/architect.txt`
265
283
 
266
284
  **Use case**: Discover available roles before using them with `--role` or inline `MODEL=ROLE` syntax.
267
285
 
268
- **See also**: `--role`, `--model`, `--prompts_dir`, `--roles_prefix`
286
+ **See also**: `--role`, `--model`, `--prompts-dir`, `--roles-prefix`
269
287
 
270
288
  ### `-n, --next PROMPT_ID`
271
289
  Next prompt to process (can be used multiple times to build a pipeline).
@@ -294,12 +312,12 @@ aia --exec my_script_prompt
294
312
  aia --no-exec my_script_prompt
295
313
  ```
296
314
 
297
- ### `--system_prompt PROMPT_ID`
315
+ ### `--system-prompt PROMPT_ID`
298
316
  System prompt ID to use for chat sessions.
299
317
 
300
318
  ```bash
301
- aia --system_prompt helpful_assistant --chat
302
- aia --system_prompt code_expert --chat my_code.py
319
+ aia --system-prompt helpful_assistant --chat
320
+ aia --system-prompt code_expert --chat my_code.py
303
321
  ```
304
322
 
305
323
  ### `--regex PATTERN`
@@ -329,42 +347,42 @@ aia --temperature 1.5 creative_writing
329
347
  aia -t 2.0 brainstorm_ideas
330
348
  ```
331
349
 
332
- ### `--max_tokens TOKENS`
350
+ ### `--max-tokens TOKENS`
333
351
  Maximum tokens for text generation.
334
352
 
335
353
  ```bash
336
- aia --max_tokens 100 short_summary
337
- aia --max_tokens 4000 detailed_analysis
354
+ aia --max-tokens 100 short_summary
355
+ aia --max-tokens 4000 detailed_analysis
338
356
  ```
339
357
 
340
- ### `--top_p VALUE`
358
+ ### `--top-p VALUE`
341
359
  Top-p sampling value (0.0 to 1.0). Alternative to temperature for controlling randomness.
342
360
 
343
361
  ```bash
344
- aia --top_p 0.1 precise_answer
345
- aia --top_p 0.9 creative_response
362
+ aia --top-p 0.1 precise_answer
363
+ aia --top-p 0.9 creative_response
346
364
  ```
347
365
 
348
- ### `--frequency_penalty VALUE`
366
+ ### `--frequency-penalty VALUE`
349
367
  Frequency penalty (-2.0 to 2.0). Positive values discourage repetition.
350
368
 
351
369
  ```bash
352
370
  # Discourage repetition
353
- aia --frequency_penalty 0.5 my_prompt
371
+ aia --frequency-penalty 0.5 my_prompt
354
372
 
355
373
  # Encourage repetition
356
- aia --frequency_penalty -0.5 my_prompt
374
+ aia --frequency-penalty -0.5 my_prompt
357
375
  ```
358
376
 
359
- ### `--presence_penalty VALUE`
377
+ ### `--presence-penalty VALUE`
360
378
  Presence penalty (-2.0 to 2.0). Positive values encourage discussing new topics.
361
379
 
362
380
  ```bash
363
381
  # Encourage new topics
364
- aia --presence_penalty 0.5 broad_discussion
382
+ aia --presence-penalty 0.5 broad_discussion
365
383
 
366
384
  # Focus on current topics
367
- aia --presence_penalty -0.5 deep_dive
385
+ aia --presence-penalty -0.5 deep_dive
368
386
  ```
369
387
 
370
388
  ## Audio/Image Options
@@ -389,32 +407,32 @@ aia --voice onyx --speak my_prompt
389
407
  aia --voice shimmer --speak my_prompt
390
408
  ```
391
409
 
392
- ### `--is, --image_size SIZE`
410
+ ### `--is, --image-size SIZE`
393
411
  Image size for image generation.
394
412
 
395
413
  ```bash
396
- aia --image_size 1024x1024 image_prompt
414
+ aia --image-size 1024x1024 image_prompt
397
415
  aia --is 1792x1024 wide_image
398
416
  aia --is 1024x1792 tall_image
399
417
  ```
400
418
 
401
419
  **Common sizes**: `256x256`, `512x512`, `1024x1024`, `1792x1024`, `1024x1792`
402
420
 
403
- ### `--iq, --image_quality QUALITY`
421
+ ### `--iq, --image-quality QUALITY`
404
422
  Image quality for image generation.
405
423
 
406
424
  ```bash
407
- aia --image_quality standard image_prompt
425
+ aia --image-quality standard image_prompt
408
426
  aia --iq hd high_quality_image
409
427
  ```
410
428
 
411
429
  **Values**: `standard`, `hd`
412
430
 
413
- ### `--style, --image_style STYLE`
431
+ ### `--style, --image-style STYLE`
414
432
  Style for image generation.
415
433
 
416
434
  ```bash
417
- aia --image_style vivid colorful_image
435
+ aia --image-style vivid colorful_image
418
436
  aia --style natural realistic_image
419
437
  ```
420
438
 
@@ -447,28 +465,37 @@ aia --tools ./tools/ my_prompt
447
465
  aia --tools "./tools/,./special_tool.rb" my_prompt
448
466
  ```
449
467
 
450
- ### `--at, --allowed_tools TOOLS_LIST`
468
+ ### `--at, --allowed-tools TOOLS_LIST`
451
469
  Allow only these tools to be used. Security feature to restrict tool access.
452
470
 
453
471
  ```bash
454
472
  # Allow specific tools
455
- aia --allowed_tools "calculator,file_reader" my_prompt
473
+ aia --allowed-tools "calculator,file_reader" my_prompt
456
474
  aia --at "web_scraper,data_analyzer" analysis_prompt
457
475
  ```
458
476
 
459
- ### `--rt, --rejected_tools TOOLS_LIST`
477
+ ### `--rt, --rejected-tools TOOLS_LIST`
460
478
  Reject/block these tools from being used.
461
479
 
462
480
  ```bash
463
481
  # Block dangerous tools
464
- aia --rejected_tools "file_writer,system_command" my_prompt
482
+ aia --rejected-tools "file_writer,system_command" my_prompt
465
483
  aia --rt "network_access" secure_prompt
466
484
  ```
467
485
 
468
486
  ## Utility Options
469
487
 
488
+ ### Log Level Options
489
+
490
+ AIA provides multiple log level options to control logging verbosity. These options set the log level for all three loggers:
491
+ - **aia**: Used within the AIA codebase for application-level logging
492
+ - **llm**: Passed to the RubyLLM gem's configuration (`RubyLLM.logger`)
493
+ - **mcp**: Passed to the RubyLLM::MCP process (`RubyLLM::MCP.logger`)
494
+
495
+ Only one log level option should be used at a time.
496
+
470
497
  ### `-d, --debug`
471
- Enable debug output for troubleshooting.
498
+ Enable debug output (most verbose) and set all loggers to DEBUG level. Also sets `$DEBUG_ME = true` for the debug_me gem.
472
499
 
473
500
  ```bash
474
501
  aia --debug my_prompt
@@ -476,12 +503,49 @@ aia -d --chat
476
503
  ```
477
504
 
478
505
  ### `--no-debug`
479
- Explicitly disable debug output.
506
+ Explicitly disable debug output. Sets `$DEBUG_ME = false`.
480
507
 
481
508
  ```bash
482
509
  aia --no-debug my_prompt
483
510
  ```
484
511
 
512
+ ### `--info`
513
+ Set all loggers to INFO level. Shows informational messages and above.
514
+
515
+ ```bash
516
+ aia --info my_prompt
517
+ aia --info --chat
518
+ ```
519
+
520
+ ### `--warn`
521
+ Set all loggers to WARN level (this is the default). Shows warnings, errors, and fatal messages.
522
+
523
+ ```bash
524
+ aia --warn my_prompt
525
+ ```
526
+
527
+ ### `--error`
528
+ Set all loggers to ERROR level. Shows only errors and fatal messages.
529
+
530
+ ```bash
531
+ aia --error my_prompt
532
+ aia --error --chat
533
+ ```
534
+
535
+ ### `--fatal`
536
+ Set all loggers to FATAL level (least verbose). Shows only critical/fatal messages.
537
+
538
+ ```bash
539
+ aia --fatal my_prompt
540
+ ```
541
+
542
+ **Log Level Hierarchy** (from most to least verbose):
543
+ 1. `debug` - All messages including detailed debugging information
544
+ 2. `info` - Informational messages and above
545
+ 3. `warn` - Warnings, errors, and fatal messages (default)
546
+ 4. `error` - Only errors and fatal messages
547
+ 5. `fatal` - Only critical/fatal messages
548
+
485
549
  ### `-v, --[no-]verbose`
486
550
  Enable/disable verbose output.
487
551
 
@@ -575,10 +639,10 @@ aia --model "gpt-4,claude-3-sonnet" --consensus analysis_prompt data.csv
575
639
  aia --model gpt-4 --temperature 1.2 --speak --voice nova story_prompt
576
640
 
577
641
  # Secure tool usage
578
- aia --tools ./safe_tools/ --allowed_tools "calculator,file_reader" --rejected_tools "system_command" analysis_prompt
642
+ aia --tools ./safe_tools/ --allowed-tools "calculator,file_reader" --rejected-tools "system_command" analysis_prompt
579
643
 
580
644
  # Pipeline with custom configuration
581
- aia --pipeline "extract,analyze,summarize" --temperature 0.3 --max_tokens 2000 --out_file report.md data_source.txt
645
+ aia --pipeline "extract,analyze,summarize" --temperature 0.3 --max-tokens 2000 --output report.md data_source.txt
582
646
 
583
647
  # Debug mode with verbose output
584
648
  aia --debug --verbose --model claude-3-sonnet problematic_prompt
@@ -588,13 +652,13 @@ aia --debug --verbose --model claude-3-sonnet problematic_prompt
588
652
 
589
653
  ```bash
590
654
  # Use custom configuration
591
- aia --config_file ./project_config.yml --prompts_dir ./project_prompts/ my_prompt
655
+ aia --config-file ./project_config.yml --prompts-dir ./project_prompts/ my_prompt
592
656
 
593
657
  # Save output with markdown formatting
594
- aia --out_file analysis.md --markdown --append data_analysis dataset.csv
658
+ aia --output analysis.md --markdown --append data_analysis dataset.csv
595
659
 
596
660
  # Audio processing
597
- aia --transcription_model whisper-1 --speech_model tts-1-hd --voice echo audio_prompt audio_file.wav
661
+ aia --transcription-model whisper-1 --speech-model tts-1-hd --voice echo audio_prompt audio_file.wav
598
662
  ```
599
663
 
600
664
  ## Exit Codes
@@ -607,10 +671,11 @@ aia --transcription_model whisper-1 --speech_model tts-1-hd --voice echo audio_p
607
671
 
608
672
  ## Environment Variables
609
673
 
610
- Many CLI options have corresponding environment variables with the `AIA_` prefix:
674
+ Many CLI options have corresponding environment variables with the `AIA_` prefix.
675
+ Use double underscore (`__`) for nested configuration sections:
611
676
 
612
677
  ```bash
613
- # Basic model configuration
678
+ # Model configuration (top-level)
614
679
  export AIA_MODEL="gpt-4"
615
680
 
616
681
  # Model with inline role syntax
@@ -619,11 +684,19 @@ export AIA_MODEL="gpt-4o=architect"
619
684
  # Multiple models with roles
620
685
  export AIA_MODEL="gpt-4o=architect,claude=security,gemini=performance"
621
686
 
622
- # Other configuration
623
- export AIA_TEMPERATURE="0.8"
624
- export AIA_PROMPTS_DIR="/custom/prompts"
625
- export AIA_VERBOSE="true"
626
- export AIA_DEBUG="false"
687
+ # LLM settings (nested under llm:)
688
+ export AIA_LLM__TEMPERATURE="0.8"
689
+
690
+ # Prompts settings (nested under prompts:)
691
+ export AIA_PROMPTS__DIR="/custom/prompts"
692
+
693
+ # Flags (nested under flags:)
694
+ export AIA_FLAGS__VERBOSE="true"
695
+ export AIA_FLAGS__DEBUG="false"
696
+ export AIA_FLAGS__CHAT="true"
697
+
698
+ # Output settings (nested under output:)
699
+ export AIA_OUTPUT__FILE="./output.md"
627
700
  ```
628
701
 
629
702
  **Note**: The `AIA_MODEL` environment variable supports the same inline `MODEL=ROLE` syntax as the `--model` CLI option.
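
For illustration, the double-underscore variables above correspond to nested sections of the YAML configuration file. A minimal sketch of the equivalent file-based settings (key names inferred from the variable names and the "nested under" comments, so treat them as illustrative rather than definitive):

```yaml
# Sketch only: nested keys inferred from the AIA_* variable names above
llm:
  temperature: 0.8
flags:
  verbose: true
  debug: false
  chat: true
prompts:
  dir: /custom/prompts
output:
  file: ./output.md
```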
data/docs/configuration.md
@@ -35,7 +35,7 @@ frequency_penalty: 0.0 # Repetition penalty (-2.0 to 2.0)
35
35
  presence_penalty: 0.0 # Topic penalty (-2.0 to 2.0)
36
36
 
37
37
  # Output Settings
38
- out_file: null # Output file (null = no file output)
38
+ output: null # Output file (null = no file output)
39
39
  append: false # Append to output file instead of overwriting
40
40
  markdown: true # Format output with Markdown
41
41
  verbose: false # Show detailed output
@@ -72,8 +72,23 @@ pipeline: [] # Default prompt pipeline
72
72
  executable_prompt: false # Run prompts as executables
73
73
 
74
74
  # Logging
75
- log_file: null # Log file path
75
+ history_file: null # Conversation history file for logging prompts and responses
76
76
  refresh: 7 # Model database refresh interval (days)
77
+
78
+ # Logger Configuration (for detailed logging control)
79
+ logger:
80
+ aia: # AIA application logging
81
+ file: STDOUT # STDOUT, STDERR, or a file path
82
+ level: warn # debug, info, warn, error, fatal
83
+ flush: true # Immediate write (no buffering)
84
+ llm: # RubyLLM gem logging
85
+ file: STDOUT
86
+ level: warn
87
+ flush: true
88
+ mcp: # RubyLLM::MCP gem logging
89
+ file: STDOUT
90
+ level: warn
91
+ flush: true
77
92
  ```
78
93
 
79
94
  ### Model-Specific Configuration
@@ -95,27 +110,48 @@ max_tokens: 8000
95
110
 
96
111
  ## Environment Variables
97
112
 
98
- All configuration options can be set via environment variables with the `AIA_` prefix:
113
+ All configuration options can be set via environment variables with the `AIA_` prefix.
114
+ Use double underscore (`__`) for nested configuration sections:
99
115
 
100
116
  ```bash
101
- # Core settings
117
+ # LLM settings (nested under llm:)
118
+ export AIA_LLM__TEMPERATURE="0.8"
119
+ export AIA_LLM__ADAPTER="ruby_llm"
120
+
121
+ # Models (top-level array, supports MODEL=ROLE syntax)
102
122
  export AIA_MODEL="gpt-4"
103
- export AIA_TEMPERATURE="0.8"
104
- export AIA_PROMPTS_DIR="/path/to/my/prompts"
123
+ export AIA_MODEL="gpt-4o=architect,claude=reviewer"
124
+
125
+ # Prompts settings (nested under prompts:)
126
+ export AIA_PROMPTS__DIR="/path/to/my/prompts"
127
+ export AIA_PROMPTS__ROLES_PREFIX="roles"
105
128
 
106
129
  # API Keys (handled by RubyLLM)
107
130
  export OPENAI_API_KEY="your_key_here"
108
- export ANTHROPIC_API_KEY="your_key_here"
131
+ export ANTHROPIC_API_KEY="your_key_here"
109
132
  export GOOGLE_API_KEY="your_key_here"
110
133
 
111
- # Chat settings
112
- export AIA_CHAT="true"
113
- export AIA_VERBOSE="true"
114
- export AIA_DEBUG="false"
134
+ # Flags (nested under flags:)
135
+ export AIA_FLAGS__CHAT="true"
136
+ export AIA_FLAGS__VERBOSE="true"
137
+ export AIA_FLAGS__DEBUG="false"
138
+ export AIA_FLAGS__TOKENS="true"
139
+ export AIA_FLAGS__COST="true"
140
+
141
+ # Output settings (nested under output:)
142
+ export AIA_OUTPUT__FILE="/tmp/aia_output.md"
143
+ export AIA_OUTPUT__MARKDOWN="true"
144
+ export AIA_OUTPUT__LOG_FILE="~/.prompts/_prompts.log"
145
+
146
+ # Tools settings (nested under tools:)
147
+ export AIA_TOOLS__PATHS="/path/to/tools"
148
+ export AIA_TOOLS__REJECTED="dangerous_tool"
115
149
 
116
- # Output settings
117
- export AIA_OUT_FILE="/tmp/aia_output.md"
118
- export AIA_MARKDOWN="true"
150
+ # Registry settings (nested under registry:)
151
+ export AIA_REGISTRY__REFRESH="7"
152
+
153
+ # Paths settings (nested under paths:)
154
+ export AIA_PATHS__AIA_DIR="~/.aia"
119
155
  ```
120
156
 
121
157
  ## Command Line Arguments
@@ -134,6 +170,107 @@ Prompts can contain configuration directives that override all other settings:
134
170
  Write a creative story about...
135
171
  ```
136
172
 
173
+ ## Logger Configuration
174
+
175
+ AIA uses the Lumberjack gem for logging and manages three separate loggers:
176
+
177
+ | Logger | Purpose |
178
+ |--------|---------|
179
+ | `aia` | Used within the AIA codebase for application-level logging |
180
+ | `llm` | Passed to the RubyLLM gem's configuration (`RubyLLM.logger`) |
181
+ | `mcp` | Passed to the RubyLLM::MCP process (`RubyLLM::MCP.logger`) |
182
+
183
+ ### Configuration File Settings
184
+
185
+ Each logger can be configured independently in your `~/.aia/config.yml`:
186
+
187
+ ```yaml
188
+ logger:
189
+ aia:
190
+ file: STDOUT # STDOUT, STDERR, or a file path (e.g., ~/.aia/aia.log)
191
+ level: warn # debug, info, warn, error, fatal
192
+ flush: true # true = immediate write, false = buffered
193
+ llm:
194
+ file: STDOUT
195
+ level: warn
196
+ flush: true
197
+ mcp:
198
+ file: STDOUT
199
+ level: warn
200
+ flush: true
201
+ ```
202
+
203
+ **Note**: All three loggers can safely write to the same file path. AIA handles multi-process safe file writes with automatic log file rotation (daily).
204
+
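
As an illustration of that note, here is a sketch of a configuration in which all three loggers write to one shared file (the path shown is only an example):

```yaml
# Sketch: all three loggers sharing a single (example) log file
logger:
  aia:
    file: ~/.aia/logs/aia.log
    level: info
    flush: true
  llm:
    file: ~/.aia/logs/aia.log
    level: warn
    flush: true
  mcp:
    file: ~/.aia/logs/aia.log
    level: warn
    flush: true
```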
205
+ ### CLI Log Level Override
206
+
207
+ Command-line log level options override the config file settings for ALL loggers:
208
+
209
+ ```bash
210
+ # Set all loggers to debug level
211
+ aia --debug my_prompt
212
+
213
+ # Set all loggers to info level
214
+ aia --info my_prompt
215
+
216
+ # Set all loggers to warn level (default)
217
+ aia --warn my_prompt
218
+
219
+ # Set all loggers to error level
220
+ aia --error my_prompt
221
+
222
+ # Set all loggers to fatal level
223
+ aia --fatal my_prompt
224
+ ```
225
+
226
+ ### Environment Variables
227
+
228
+ Logger settings can also be configured via environment variables:
229
+
230
+ ```bash
231
+ # AIA logger settings
232
+ export AIA_LOGGER__AIA__FILE="~/.aia/aia.log"
233
+ export AIA_LOGGER__AIA__LEVEL="debug"
234
+ export AIA_LOGGER__AIA__FLUSH="true"
235
+
236
+ # LLM logger settings
237
+ export AIA_LOGGER__LLM__FILE="STDOUT"
238
+ export AIA_LOGGER__LLM__LEVEL="info"
239
+
240
+ # MCP logger settings
241
+ export AIA_LOGGER__MCP__FILE="STDERR"
242
+ export AIA_LOGGER__MCP__LEVEL="warn"
243
+ ```
244
+
245
+ ### Log Levels
246
+
247
+ | Level | Description |
248
+ |-------|-------------|
249
+ | `debug` | Most verbose - all messages including detailed debugging info |
250
+ | `info` | Informational messages and above |
251
+ | `warn` | Warnings, errors, and fatal messages (default) |
252
+ | `error` | Only errors and fatal messages |
253
+ | `fatal` | Least verbose - only critical/fatal messages |
254
+
255
+ ### Example: File-Based Logging
256
+
257
+ ```yaml
258
+ # ~/.aia/config.yml - Log everything to files
259
+ logger:
260
+ aia:
261
+ file: ~/.aia/logs/aia.log
262
+ level: info
263
+ flush: true
264
+ llm:
265
+ file: ~/.aia/logs/llm.log
266
+ level: debug
267
+ flush: false
268
+ mcp:
269
+ file: ~/.aia/logs/mcp.log
270
+ level: warn
271
+ flush: true
272
+ ```
273
+
137
274
  ## Advanced Configuration
138
275
 
139
276
  ### Multi-Model Configuration
@@ -227,7 +364,7 @@ model: gpt-4
227
364
  temperature: 0.3
228
365
  verbose: true
229
366
  debug: true
230
- out_file: ./dev_output.md
367
+ output: ./dev_output.md
231
368
  prompts_dir: ./prompts
232
369
  tool_paths: [./tools]
233
370
  ```
@@ -241,7 +378,7 @@ model: gpt-3.5-turbo
241
378
  temperature: 0.7
242
379
  verbose: false
243
380
  debug: false
244
- log_file: /var/log/aia.log
381
+ history_file: /var/log/aia_history.log
245
382
  prompts_dir: /etc/aia/prompts
246
383
  tool_paths: [/usr/share/aia-tools]
247
384
  allowed_tools: [safe_calculator, file_reader]
@@ -258,7 +395,7 @@ max_tokens: 4000
258
395
  speak: true
259
396
  voice: nova
260
397
  markdown: true
261
- out_file: ~/writing/aia_output.md
398
+ output: ~/writing/aia_output.md
262
399
  append: true
263
400
  ```
264
401
 
@@ -276,7 +413,7 @@ aia --dump config.yaml
276
413
 
277
414
  ```bash
278
415
  # Test model access
279
- aia --available_models
416
+ aia --available-models
280
417
 
281
418
  # Test configuration
282
419
  aia --debug --verbose hello_world
@@ -289,7 +426,7 @@ aia --tools ./my_tools --debug test_prompt
289
426
 
290
427
  #### Model Not Found
291
428
  - Check your API keys are set
292
- - Verify the model name: `aia --available_models`
429
+ - Verify the model name: `aia --available-models`
293
430
  - Check network connectivity
294
431
 
295
432
  #### Permission Errors