aia 0.5.18 → 0.8.0

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/.envrc +1 -0
  3. data/.version +1 -1
  4. data/CHANGELOG.md +39 -5
  5. data/README.md +388 -219
  6. data/Rakefile +16 -5
  7. data/_notes.txt +231 -0
  8. data/bin/aia +3 -2
  9. data/examples/README.md +140 -0
  10. data/examples/headlines +21 -0
  11. data/lib/aia/ai_client_adapter.rb +210 -0
  12. data/lib/aia/chat_processor_service.rb +120 -0
  13. data/lib/aia/config.rb +473 -4
  14. data/lib/aia/context_manager.rb +58 -0
  15. data/lib/aia/directive_processor.rb +267 -0
  16. data/lib/aia/{tools/fzf.rb → fzf.rb} +9 -17
  17. data/lib/aia/history_manager.rb +85 -0
  18. data/lib/aia/prompt_handler.rb +178 -0
  19. data/lib/aia/session.rb +215 -0
  20. data/lib/aia/shell_command_executor.rb +109 -0
  21. data/lib/aia/ui_presenter.rb +110 -0
  22. data/lib/aia/utility.rb +24 -0
  23. data/lib/aia/version.rb +9 -6
  24. data/lib/aia.rb +57 -61
  25. data/lib/extensions/openstruct_merge.rb +44 -0
  26. metadata +29 -43
  27. data/LICENSE.txt +0 -21
  28. data/doc/aia_and_pre_compositional_prompts.md +0 -474
  29. data/lib/aia/clause.rb +0 -7
  30. data/lib/aia/cli.rb +0 -452
  31. data/lib/aia/directives.rb +0 -142
  32. data/lib/aia/dynamic_content.rb +0 -26
  33. data/lib/aia/logging.rb +0 -62
  34. data/lib/aia/main.rb +0 -265
  35. data/lib/aia/prompt.rb +0 -275
  36. data/lib/aia/tools/ai_client_backend.rb +0 -92
  37. data/lib/aia/tools/backend_common.rb +0 -58
  38. data/lib/aia/tools/client.rb +0 -197
  39. data/lib/aia/tools/editor.rb +0 -52
  40. data/lib/aia/tools/glow.rb +0 -90
  41. data/lib/aia/tools/llm.rb +0 -77
  42. data/lib/aia/tools/mods.rb +0 -100
  43. data/lib/aia/tools/sgpt.rb +0 -79
  44. data/lib/aia/tools/subl.rb +0 -68
  45. data/lib/aia/tools/vim.rb +0 -93
  46. data/lib/aia/tools.rb +0 -88
  47. data/lib/aia/user_query.rb +0 -21
  48. data/lib/core_ext/string_wrap.rb +0 -73
  49. data/lib/core_ext/tty-spinner_log.rb +0 -25
  50. data/man/aia.1 +0 -272
  51. data/man/aia.1.md +0 -236
data/lib/aia/chat_processor_service.rb ADDED
@@ -0,0 +1,120 @@
+# lib/aia/chat_processor_service.rb
+
+
+require_relative 'shell_command_executor'
+
+module AIA
+  class ChatProcessorService
+    def initialize(ui_presenter, directive_processor = nil)
+      @ui_presenter = ui_presenter
+      @speaker = AIA.speak? ? AiClient.new(AIA.config.speech_model) : nil
+      @directive_processor = directive_processor
+    end
+
+
+
+    def speak(text)
+      return unless AIA.speak?
+
+      @speaker ||= AiClient.new(AIA.config.speech_model) if AIA.config.speech_model
+
+      if @speaker
+        `#{AIA.config.speak_command} #{@speaker.speak(text).path}`
+      else
+        puts "Warning: Unable to speak. Speech model not configured properly."
+      end
+    end
+
+
+
+    def process_prompt(prompt, operation_type)
+      @ui_presenter.with_spinner("Processing", operation_type) do
+        send_to_client(prompt, operation_type)
+      end
+    end
+
+
+
+    def send_to_client(prompt, operation_type)
+      maybe_change_model
+
+      case operation_type
+      when :text_to_text
+        AIA.client.chat(prompt)
+      when :text_to_image
+        AIA.client.chat(prompt)
+      when :image_to_text
+        AIA.client.chat(prompt)
+      when :text_to_audio
+        AIA.client.chat(prompt)
+      when :audio_to_text
+        if prompt.strip.end_with?('.mp3', '.wav', '.m4a', '.flac') && File.exist?(prompt.strip)
+          AIA.client.transcribe(prompt.strip)
+        else
+          AIA.client.chat(prompt) # Fall back to regular chat
+        end
+      else
+        AIA.client.chat(prompt)
+      end
+    end
+
+
+    def maybe_change_model
+      if AIA.client.model != AIA.config.model
+        AIA.client = AIClientAdapter.new
+      end
+    end
+
+
+    def output_response(response)
+      speak(response)
+
+      # Only output to STDOUT if we're in chat mode
+
+      if AIA.chat? || 'STDOUT' == AIA.config.out_file.upcase
+        print "\nAI:\n "
+        puts response
+      else
+        mode = AIA.append? ? 'a' : 'w'
+        File.open(AIA.config.out_file, mode) do |file|
+          file.puts response
+        end
+      end
+
+      if AIA.config.log_file
+        File.open(AIA.config.log_file, 'a') do |f|
+          f.puts "=== #{Time.now} ==="
+          f.puts "Prompt: #{AIA.config.prompt_id}"
+          f.puts "Response: #{response}"
+          f.puts "==="
+        end
+      end
+    end
+
+
+
+    def process_next_prompts(response, prompt_handler)
+      if @directive_processor.directive?(response)
+        directive_result = @directive_processor.process(response, @history_manager.history)
+        response = directive_result[:result]
+        @history_manager.history = directive_result[:modified_history] if directive_result[:modified_history]
+      end
+    end
+
+
+    def determine_operation_type(model)
+      model = model.to_s.downcase
+      if model.include?('dall') || model.include?('image-generation')
+        :text_to_image
+      elsif model.include?('vision') || model.include?('image')
+        :image_to_text
+      elsif model.include?('whisper') || model.include?('audio')
+        :audio_to_text
+      elsif model.include?('speech') || model.include?('tts')
+        :text_to_audio
+      else
+        :text_to_text
+      end
+    end
+  end
+end
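
For orientation, here is a rough usage sketch of the new ChatProcessorService (illustrative only, not part of the diff; it assumes AIA.config and AIA.client are already initialized and that `presenter` responds to #with_spinner, as AIA::UIPresenter does in this release):

    # Hypothetical sketch: driving ChatProcessorService as defined above.
    service  = AIA::ChatProcessorService.new(presenter)

    op       = service.determine_operation_type(AIA.config.model) # "gpt-4o-mini" => :text_to_text
    response = service.process_prompt("Summarize the README", op)
    service.output_response(response) # STDOUT in chat mode, otherwise out_file (+ optional log)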
data/lib/aia/config.rb CHANGED
@@ -1,7 +1,476 @@
-# aia/lib/aia/config.rb
+# lib/aia/config.rb
+#
+# This file contains the configuration settings for the AIA application.
+# The Config class is responsible for managing configuration settings
+# for the AIA application. It provides methods to parse command-line
+# arguments, environment variables, and configuration files.
 
-require 'hashie'
+require 'yaml'
+require 'toml-rb'
+require 'erb'
+require 'optparse'
 
-class AIA::Config < Hashie::Mash
-  disable_warnings
+
+module AIA
+  class Config
+    DEFAULT_CONFIG = {
+      aia_dir: File.join(ENV['HOME'], '.aia'),
+      config_file: File.join(ENV['HOME'], '.aia', 'config.yml'),
+      out_file: 'temp.md',
+      log_file: File.join(ENV['HOME'], '.prompts', '_prompts.log'),
+      prompts_dir: File.join(ENV['HOME'], '.prompts'),
+      roles_prefix: 'roles',
+      roles_dir: File.join(ENV['HOME'], '.prompts', 'roles'),
+      role: '',
+      system_prompt: '',
+
+      # Flags
+      markdown: true,
+      shell: false,
+      erb: false,
+      chat: false,
+      clear: false,
+      terse: false,
+      verbose: false,
+      debug: $DEBUG_ME,
+      fuzzy: false,
+      speak: false,
+      append: false, # Default to not append to existing out_file
+
+      # workflow
+      next: nil,
+      pipeline: [],
+
+      # PromptManager::Prompt Tailoring
+
+      parameter_regex: PromptManager::Prompt.parameter_regex.to_s,
+
+      # LLM tuning parameters
+      temperature: 0.7,
+      max_tokens: 2048,
+      top_p: 1.0,
+      frequency_penalty: 0.0,
+      presence_penalty: 0.0,
+      image_size: '1024x1024',
+      image_quality: 'standard',
+      image_style: 'vivid',
+      model: 'gpt-4o-mini',
+      speech_model: 'tts-1',
+      transcription_model: 'whisper-1',
+      voice: 'alloy',
+
+      # Embedding parameters
+      embedding_model: 'text-embedding-ada-002',
+
+      # Default speak command
+      speak_command: 'afplay', # 'afplay' for audio files
+
+      # Ruby libraries to require for Ruby binding
+      require_libs: [],
+    }.freeze
+
+
+    def self.setup
+      default_config = OpenStruct.new(DEFAULT_CONFIG)
+      cli_config = cli_options
+      envar_config = envar_options(default_config, cli_config)
+
+      file = envar_config.config_file unless envar_config.config_file.nil?
+      file = cli_config.config_file unless cli_config.config_file.nil?
+
+      cf_config = cf_options(file)
+
+      config = OpenStruct.merge(
+        default_config,
+        cf_config || {},
+        envar_config || {},
+        cli_config || {}
+      )
+
+      tailor_the_config(config)
+    end
+
+
+    def self.tailor_the_config(config)
+      unless config.role.empty?
+        unless config.roles_prefix.empty?
+          unless config.role.start_with?(config.roles_prefix)
+            config.role.prepend "#{config.roles_prefix}/"
+          end
+        end
+      end
+
+      config.roles_dir ||= File.join(config.prompts_dir, config.roles_prefix)
+
+      if config.prompt_id.nil? || config.prompt_id.empty?
+        if !config.role.nil? || !config.role.empty?
+          config.prompt_id = config.role
+          config.role = ''
+        end
+      end
+
+      if config.fuzzy && config.prompt_id.empty?
+        # When fuzzy search is enabled but no prompt ID is provided,
+        # set a special value to trigger fuzzy search without an initial query
+        # SMELL: This feels like a cludge
+        config.prompt_id = '__FUZZY_SEARCH__'
+      end
+
+      unless [TrueClass, FalseClass].include?(config.chat.class)
+        if config.chat.nil? || config.chat.empty?
+          config.chat = false
+        else
+          config.chat = true
+        end
+      end
+
+      unless [TrueClass, FalseClass].include?(config.fuzzy.class)
+        if config.fuzzy.nil? || config.fuzzy.empty?
+          config.fuzzy = false
+        else
+          config.fuzzy = true
+        end
+      end
+
+      and_exit = false
+
+      if config.completion
+        generate_completion_script(config.completion)
+        and_exit = true
+      end
+
+      if config.dump_file
+        dump_config(config, config.dump_file)
+        and_exit = true
+      end
+
+      exit if and_exit
+
+      # Only require a prompt_id if we're not in chat mode and not using fuzzy search
+      if !config.chat && !config.fuzzy && config.prompt_id.empty?
+        STDERR.puts "Error: A prompt ID is required unless using --chat or --fuzzy. Use -h or --help for help."
+        exit 1
+      end
+
+      # Tailor the PromptManager::Prompt
+      if config.parameter_regex
+        PromptManager::Prompt.parameter_regex = Regexp.new(config.parameter_regex)
+      end
+
+      config
+    end
+
+    # envar values are always String object so need other config
+    # layers to know the prompter type for each key's value
+    def self.envar_options(default, cli_config)
+      config = OpenStruct.merge(default, cli_config)
+      envars = ENV.keys.select { |key, _| key.start_with?('AIA_') }
+      envars.each do |envar|
+        key = envar.sub(/^AIA_/, '').downcase.to_sym
+        value = ENV[envar]
+
+        value = case config[key]
+                when TrueClass, FalseClass
+                  value.downcase == 'true'
+                when Integer
+                  value.to_i
+                when Float
+                  value.to_f
+                when Array
+                  value.split(',').map(&:strip)
+                else
+                  value # defaults to String
+                end
+        config[key] = value
+      end
+
+      config
+    end
+
+
+    def self.cli_options
+      config = OpenStruct.new
+
+      opt_parser = OptionParser.new do |opts|
+        opts.banner = "Usage: aia [options] PROMPT_ID [CONTEXT_FILE]*"
+
+        opts.on("--chat", "Begin a chat session with the LLM after the initial prompt response; will set --no-out_file so that the LLM response comes to STDOUT.") do
+          config.chat = true
+          puts "Debug: Setting chat mode to true" if config.debug
+        end
+
+        opts.on("-m MODEL", "--model MODEL", "Name of the LLM model to use") do |model|
+          config.model = model
+        end
+
+        opts.on("--shell", "Enables `aia` to access your terminal's shell environment from inside the prompt text, allowing for dynamic content insertion using system environment variables and shell commands. Includes safety features to confirm or block dangerous commands.") do
+          config.shell = true
+        end
+
+        opts.on("--erb", "Turns the prompt text file into a fully functioning ERB template, allowing for embedded Ruby code processing within the prompt text. This enables dynamic content generation and complex logic within prompts.") do
+          config.erb = true
+        end
+
+        opts.on("--terse", "Add terse instruction to prompt") do
+          config.terse = true
+        end
+
+        opts.on("-c", "--config_file FILE", "Load config file") do |file|
+          if File.exist?(file)
+            ext = File.extname(file).downcase
+            content = File.read(file)
+
+            # Process ERB if filename ends with .erb
+            if file.end_with?('.erb')
+              content = ERB.new(content).result
+              file = file.chomp('.erb')
+              File.write(file, content)
+            end
+
+            file_config = case ext
+                          when '.yml', '.yaml'
+                            YAML.safe_load(content, permitted_classes: [Symbol], symbolize_names: true)
+                          when '.toml'
+                            TomlRB.parse(content)
+                          else
+                            raise "Unsupported config file format: #{ext}"
+                          end
+
+            file_config.each do |key, value|
+              config[key.to_sym] = value
+            end
+          else
+            raise "Config file not found: #{file}"
+          end
+        end
+
+        opts.on("-p", "--prompts_dir DIR", "Directory containing prompt files") do |dir|
+          config.prompts_dir = dir
+        end
+
+        opts.on("--roles_prefix PREFIX", "Subdirectory name for role files (default: roles)") do |prefix|
+          config.roles_prefix = prefix
+        end
+
+        opts.on("-r", "--role ROLE_ID", "Role ID to prepend to prompt") do |role|
+          config.role = role
+        end
+
+        opts.on('--regex pattern', 'Regex pattern to extract parameters from prompt text') do |pattern|
+          config.parameter_regex = pattern
+        end
+
+        opts.on("-o", "--[no-]out_file [FILE]", "Output file (default: temp.md)") do |file|
+          config.out_file = file ? File.expand_path(file, Dir.pwd) : 'temp.md'
+        end
+
+        opts.on("-a", "--[no-]append", "Append to output file instead of overwriting") do |append|
+          config.append = append
+        end
+
+        opts.on("-l", "--[no-]log_file [FILE]", "Log file") do |file|
+          config.log_file = file
+        end
+
+        opts.on("--md", "--[no-]markdown", "Format with Markdown") do |md|
+          config.markdown = md
+        end
+
+        opts.on("-n", "--next PROMPT_ID", "Next prompt to process") do |next_prompt|
+          config.next = next_prompt
+        end
+
+        opts.on("--pipeline PROMPTS", "Pipeline of prompts to process") do |pipeline|
+          config.pipeline = pipeline.split(',')
+        end
+
+        opts.on("-f", "--fuzzy", "Use fuzzy matching for prompt search") do
+          unless system("which fzf > /dev/null 2>&1")
+            STDERR.puts "Error: 'fzf' is not installed. Please install 'fzf' to use the --fuzzy option."
+            exit 1
+          end
+          config.fuzzy = true
+        end
+
+        opts.on("-d", "--debug", "Enable debug output") do
+          config.debug = $DEBUG_ME = true
+        end
+
+        opts.on("--no-debug", "Disable debug output") do
+          config.debug = $DEBUG_ME = false
+        end
+
+        opts.on("-v", "--verbose", "Be verbose") do
+          config.verbose = true
+        end
+
+        opts.on("--speak", "Simple implementation. Uses the speech model to convert text to audio, then plays the audio. Fun with --chat. Supports configuration of speech model and voice.") do
+          config.speak = true
+        end
+
+        opts.on("--voice VOICE", "Voice to use for speech") do |voice|
+          config.voice = voice
+        end
+
+        opts.on("--sm", "--speech_model MODEL", "Speech model to use") do |model|
+          config.speech_model = model
+        end
+
+        opts.on("--tm", "--transcription_model MODEL", "Transcription model to use") do |model|
+          config.transcription_model = model
+        end
+
+        opts.on("--is", "--image_size SIZE", "Image size for image generation") do |size|
+          config.image_size = size
+        end
+
+        opts.on("--iq", "--image_quality QUALITY", "Image quality for image generation") do |quality|
+          config.image_quality = quality
+        end
+
+        opts.on("--style", "--image_style STYLE", "Style for image generation") do |style|
+          config.image_style = style
+        end
+
+        opts.on("--system_prompt PROMPT_ID", "System prompt ID to use for chat sessions") do |prompt_id|
+          config.system_prompt = prompt_id
+        end
+
+        # AI model parameters
+        opts.on("-t", "--temperature TEMP", Float, "Temperature for text generation") do |temp|
+          config.temperature = temp
+        end
+
+        opts.on("--max_tokens TOKENS", Integer, "Maximum tokens for text generation") do |tokens|
+          config.max_tokens = tokens
+        end
+
+        opts.on("--top_p VALUE", Float, "Top-p sampling value") do |value|
+          config.top_p = value
+        end
+
+        opts.on("--frequency_penalty VALUE", Float, "Frequency penalty") do |value|
+          config.frequency_penalty = value
+        end
+
+        opts.on("--presence_penalty VALUE", Float, "Presence penalty") do |value|
+          config.presence_penalty = value
+        end
+
+        opts.on("--dump FILE", "Dump config to file") do |file|
+          config.dump_file = file
+        end
+
+        opts.on("--completion SHELL", "Show completion script for bash|zsh|fish - default is nil") do |shell|
+          config.completion = shell
+        end
+
+        opts.on("--version", "Show version") do
+          puts AIA::VERSION
+          exit
+        end
+
+        opts.on("-h", "--help", "Prints this help") do
+          puts opts
+          exit
+        end
+
+        opts.on("--rq LIBS", "Ruby libraries to require for Ruby directive") do |libs|
+          config.require_libs = libs.split(',')
+        end
+      end
+
+      args = ARGV.dup
+
+      # Parse the command line arguments
+      begin
+        remaining_args = opt_parser.parse(args)
+      rescue OptionParser::InvalidOption => e
+        puts e.message
+        puts opt_parser
+        exit 1
+      end
+
+      # First remaining arg is the prompt ID
+      unless remaining_args.empty?
+        config.prompt_id = remaining_args.shift
+      end
+
+      # Remaining args are context files
+      config.context_files = remaining_args unless remaining_args.empty?
+
+
+      config
+    end
+
+
+    def self.cf_options(file)
+      config = OpenStruct.new
+
+      if File.exist?(file)
+        ext = File.extname(file).downcase
+        content = File.read(file)
+
+        # Process ERB if filename ends with .erb
+        if file.end_with?('.erb')
+          content = ERB.new(content).result
+          file = file.chomp('.erb')
+          File.write(file, content)
+        end
+
+        file_config = case ext
+                      when '.yml', '.yaml'
+                        YAML.safe_load(content, permitted_classes: [Symbol], symbolize_names: true)
+                      when '.toml'
+                        TomlRB.parse(content)
+                      else
+                        raise "Unsupported config file format: #{ext}"
+                      end
+
+        file_config.each do |key, value|
+          config[key] = value
+        end
+      else
+        STDERR.puts "WARNING:Config file not found: #{file}"
+      end
+
+      config
+    end
+
+
+    def self.generate_completion_script(shell)
+      script_path = File.join(File.dirname(__FILE__), "aia_completion.#{shell}")
+
+      if File.exist?(script_path)
+        puts File.read(script_path)
+      else
+        STDERR.puts "ERROR: The shell '#{shell}' is not supported or the completion script is missing."
+      end
+    end
+
+
+    def self.dump_config(config, file)
+      # Implementation for config dump
+      ext = File.extname(file).downcase
+      config_hash = config.to_h
+
+      # Remove non-serializable objects
+      config_hash.delete_if { |_, v| !v.nil? && !v.is_a?(String) && !v.is_a?(Numeric) && !v.is_a?(TrueClass) && !v.is_a?(FalseClass) && !v.is_a?(Array) && !v.is_a?(Hash) }
+
+      # Remove dump_file key to prevent automatic exit on next load
+      config_hash.delete(:dump_file)
+
+      content = case ext
+                when '.yml', '.yaml'
+                  YAML.dump(config_hash)
+                when '.toml'
+                  TomlRB.dump(config_hash)
+                else
+                  raise "Unsupported config file format: #{ext}"
+                end
+
+      File.write(file, content)
+      puts "Config successfully dumped to #{file}"
+    end
+  end
 end
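
The net effect of Config.setup above is a layered configuration: defaults, then the config file, then AIA_-prefixed environment variables, then CLI flags, with later layers winning. A minimal sketch of the environment-variable layer (illustrative values, not part of the diff):

    # Hypothetical sketch: env vars are coerced to the type of the matching
    # default before overriding it (see envar_options above).
    ENV['AIA_TEMPERATURE'] = '0.9'     # Float default 0.7     -> 0.9
    ENV['AIA_CHAT']        = 'true'    # Boolean default false -> true
    ENV['AIA_PIPELINE']    = 'one,two' # Array default []      -> ["one", "two"]

    config = AIA::Config.setup         # defaults < config file < env vars < CLI flags
    config.temperature                 # => 0.9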
data/lib/aia/context_manager.rb ADDED
@@ -0,0 +1,58 @@
+# lib/aia/context_manager.rb
+
+module AIA
+  # Manages the conversation context for chat sessions.
+  class ContextManager
+    attr_reader :context
+
+    # Initializes the ContextManager with an optional system prompt.
+    def initialize(system_prompt: nil)
+      @context = []
+      add_system_prompt(system_prompt) if system_prompt && !system_prompt.strip.empty?
+    end
+
+    # Adds a message to the conversation context.
+    #
+    # @param role [String] The role of the message sender ('user' or 'assistant').
+    # @param content [String] The content of the message.
+    def add_to_context(role:, content:)
+      @context << { role: role, content: content }
+    end
+
+    # Returns the current conversation context.
+    # Optionally adds the system prompt if it wasn't added during initialization
+    # or needs to be re-added after clearing.
+    #
+    # @param system_prompt [String, nil] The system prompt to potentially prepend.
+    # @return [Array<Hash>] The conversation context array.
+    def get_context(system_prompt: nil)
+      # Ensure system prompt is present if provided and not already the first message
+      if system_prompt && !system_prompt.strip.empty? && (@context.empty? || @context.first[:role] != 'system')
+        add_system_prompt(system_prompt)
+      end
+      @context
+    end
+
+    # Clears the conversation context, optionally keeping the system prompt.
+    #
+    # @param keep_system_prompt [Boolean] Whether to retain the initial system prompt.
+    def clear_context(keep_system_prompt: true)
+      if keep_system_prompt && !@context.empty? && @context.first[:role] == 'system'
+        @context = [@context.first]
+      else
+        @context = []
+        AIA.config.client.clear_context
+      end
+    end
+
+    private
+
+    # Adds or replaces the system prompt at the beginning of the context.
+    def add_system_prompt(system_prompt)
+      # Remove existing system prompt if present
+      @context.shift if !@context.empty? && @context.first[:role] == 'system'
+      # Add the new system prompt at the beginning
+      @context.unshift({ role: 'system', content: system_prompt })
+    end
+  end
+end
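
ContextManager's API is small; a hedged usage sketch follows (roles and message text are illustrative, not from the diff):

    # Hypothetical sketch: basic ContextManager usage as defined above.
    manager = AIA::ContextManager.new(system_prompt: "You are a terse assistant.")
    manager.add_to_context(role: 'user',      content: 'What does aia do?')
    manager.add_to_context(role: 'assistant', content: 'It drives LLM prompts from the CLI.')

    manager.get_context.size                        # => 3 (system + user + assistant)
    manager.clear_context(keep_system_prompt: true)
    manager.get_context.size                        # => 1 (system prompt retained)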