askcii 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/askcii/cli.rb ADDED
@@ -0,0 +1,99 @@
+ # frozen_string_literal: true
+
+ require 'optparse'
+
+ module Askcii
+   class CLI
+     attr_reader :options, :prompt
+
+     def initialize(args = ARGV.dup)
+       @args = args
+       @options = {}
+       @prompt = nil
+     end
+
+     def parse!
+       option_parser.parse!(@args)
+       @prompt = @args.join(' ')
+       self
+     end
+
+     def show_help?
+       @options[:help]
+     end
+
+     def show_usage?
+       false # Usage logic is now handled in Application class
+     end
+
+     def configure?
+       @options[:configure]
+     end
+
+     def last_response?
+       @options[:last_response]
+     end
+
+     def private?
+       @options[:private]
+     end
+
+     def model_config_id
+       @options[:model_config_id]
+     end
+
+     def help_message
+       option_parser.to_s
+     end
+
+     def usage_message
+       <<~USAGE
+         Usage:
+           askcii [options] 'Your prompt here'
+           echo 'Your prompt here' | askcii                  # Use piped text as prompt
+           echo 'Context text' | askcii 'Your prompt here'   # Use piped text as context
+           askcii 'Your prompt here' < prompt.txt            # Use file content as context
+           cat prompt.txt | askcii                           # Use file content as prompt
+           askcii -p (start a private session)
+           askcii -r (to get the last response)
+           askcii -c (manage configurations)
+           askcii -m 2 (use configuration ID 2)
+
+         Options:
+           -p, --private        Start a private session and do not record
+           -r, --last-response  Output the last response
+           -c, --configure      Manage configurations
+           -m, --model ID       Use specific configuration ID
+           -h, --help           Show help
+       USAGE
+     end
+
+     private
+
+     def option_parser
+       @option_parser ||= OptionParser.new do |opts|
+         opts.banner = "Usage: askcii [options] 'Your prompt here'"
+
+         opts.on('-p', '--private', 'Start a private session and do not record') do
+           @options[:private] = true
+         end
+
+         opts.on('-r', '--last-response', 'Output the last response') do
+           @options[:last_response] = true
+         end
+
+         opts.on('-c', '--configure', 'Manage configurations') do
+           @options[:configure] = true
+         end
+
+         opts.on('-m', '--model ID', 'Use specific configuration ID') do |model_id|
+           @options[:model_config_id] = model_id
+         end
+
+         opts.on('-h', '--help', 'Show this help message') do
+           @options[:help] = true
+         end
+       end
+     end
+   end
+ end
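
For reference, the new Askcii::CLI parses flags with OptionParser and joins whatever remains in the argument list into the prompt. A minimal sketch of driving it directly (hypothetical usage, assuming the gem is loaded; not part of the released files):

  cli = Askcii::CLI.new(['-m', '2', 'Explain', 'POSIX', 'signals']).parse!
  cli.model_config_id  # => "2"
  cli.prompt           # => "Explain POSIX signals"
  cli.private?         # => nil (the -p flag was not passed)
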
@@ -0,0 +1,285 @@
+ # frozen_string_literal: true
+
+ module Askcii
+   class ConfigurationManager
+     PROVIDER_MAP = {
+       '1' => 'openai',
+       '2' => 'anthropic',
+       '3' => 'gemini',
+       '4' => 'deepseek',
+       '5' => 'openrouter',
+       '6' => 'ollama'
+     }.freeze
+
+     DEFAULT_ENDPOINTS = {
+       'openai' => 'https://api.openai.com/v1',
+       'anthropic' => 'https://api.anthropic.com',
+       'gemini' => 'https://generativelanguage.googleapis.com/v1',
+       'deepseek' => 'https://api.deepseek.com/v1',
+       'openrouter' => 'https://openrouter.ai/api/v1',
+       'ollama' => 'http://localhost:11434/v1'
+     }.freeze
+
+     PROVIDER_MODELS = {
+       'openai' => {
+         default: 'gpt-4o',
+         models: [
+           'gpt-4o',
+           'gpt-4o-mini',
+           'gpt-4-turbo',
+           'gpt-4',
+           'gpt-3.5-turbo'
+         ]
+       },
+       'anthropic' => {
+         default: 'claude-3-5-sonnet-20241022',
+         models: [
+           'claude-3-5-sonnet-20241022',
+           'claude-3-5-haiku-20241022',
+           'claude-3-opus-20240229',
+           'claude-3-sonnet-20240229',
+           'claude-3-haiku-20240307'
+         ]
+       },
+       'gemini' => {
+         default: 'gemini-pro',
+         models: [
+           'gemini-pro',
+           'gemini-pro-vision',
+           'gemini-1.5-pro',
+           'gemini-1.5-flash'
+         ]
+       },
+       'deepseek' => {
+         default: 'deepseek-chat',
+         models: %w[
+           deepseek-chat
+           deepseek-coder
+         ]
+       },
+       'openrouter' => {
+         default: 'anthropic/claude-3.5-sonnet',
+         models: [
+           'anthropic/claude-3.5-sonnet',
+           'openai/gpt-4o',
+           'google/gemini-pro',
+           'meta-llama/llama-3.1-405b-instruct',
+           'anthropic/claude-3-opus',
+           'openai/gpt-4-turbo'
+         ]
+       },
+       'ollama' => {
+         default: 'llama3.2',
+         models: [
+           'llama3.2',
+           'llama3.1',
+           'mistral',
+           'codellama',
+           'phi3',
+           'gemma2'
+         ]
+       }
+     }.freeze
+
+     def run
+       show_current_configurations
+       show_menu
+       handle_user_choice
+     end
+
+     private
+
+     def show_current_configurations
+       puts 'Configuration Management'
+       puts '======================'
+
+       configs = Askcii::Config.configurations
+       default_id = Askcii::Config.default_configuration_id
+
+       if configs.empty?
+         puts 'No configurations found.'
+       else
+         puts 'Current configurations:'
+         configs.each do |config|
+           marker = config['id'] == default_id ? ' (default)' : ''
+           provider_info = config['provider'] ? " [#{config['provider']}]" : ''
+           puts " #{config['id']}. #{config['name']}#{provider_info}#{marker}"
+         end
+         puts
+       end
+     end
+
+     def show_menu
+       puts 'Options:'
+       puts ' 1. Add new configuration'
+       puts ' 2. Set default configuration'
+       puts ' 3. Delete configuration'
+       puts ' 4. Exit'
+       print 'Select option (1-4): '
+     end
+
+     def handle_user_choice
+       choice = $stdin.gets.chomp
+
+       case choice
+       when '1'
+         add_new_configuration
+       when '2'
+         set_default_configuration
+       when '3'
+         delete_configuration
+       when '4'
+         puts 'Exiting.'
+       else
+         puts 'Invalid option.'
+       end
+     end
+
+     def add_new_configuration
+       print 'Enter configuration name: '
+       name = $stdin.gets.chomp
+
+       provider = select_provider
+       return unless provider
+
+       api_key = get_api_key(provider)
+       return unless api_key || provider == 'ollama'
+
+       endpoint = get_api_endpoint(provider)
+       model_id = get_model_id(provider)
+
+       return unless model_id
+
+       name = model_id if name.empty?
+       Askcii::Config.add_configuration(name, api_key || '', endpoint, model_id, provider)
+       puts 'Configuration added successfully!'
+     end
+
+     def select_provider
+       puts 'Select provider:'
+       puts ' 1. OpenAI'
+       puts ' 2. Anthropic'
+       puts ' 3. Gemini'
+       puts ' 4. DeepSeek'
+       puts ' 5. OpenRouter'
+       puts ' 6. Ollama (no API key needed)'
+       print 'Provider (1-6): '
+
+       provider_choice = $stdin.gets.chomp
+       provider = PROVIDER_MAP[provider_choice]
+
+       if provider.nil?
+         puts 'Invalid provider selection.'
+         return nil
+       end
+
+       provider
+     end
+
+     def get_api_key(provider)
+       return '' if provider == 'ollama'
+
+       print "Enter #{provider.capitalize} API key: "
+       api_key = $stdin.gets.chomp
+
+       if api_key.empty?
+         puts 'API key is required for this provider.'
+         return nil
+       end
+
+       api_key
+     end
+
+     def get_api_endpoint(provider)
+       default_endpoint = DEFAULT_ENDPOINTS[provider]
+       print "Enter API endpoint (default: #{default_endpoint}): "
+       api_endpoint = $stdin.gets.chomp
+       api_endpoint.empty? ? default_endpoint : api_endpoint
+     end
+
+     def get_model_id(provider)
+       provider_config = PROVIDER_MODELS[provider]
+
+       if provider_config
+         default_model = provider_config[:default]
+         available_models = provider_config[:models]
+
+         puts "\nAvailable models for #{provider.capitalize}:"
+         available_models.each_with_index do |model, index|
+           marker = model == default_model ? ' (recommended)' : ''
+           puts " #{index + 1}. #{model}#{marker}"
+         end
+
+         puts " #{available_models.length + 1}. Enter custom model ID"
+         print "\nSelect model (1-#{available_models.length + 1}) or press Enter for default [#{default_model}]: "
+
+         choice = $stdin.gets.chomp
+
+         if choice.empty?
+           default_model
+         elsif choice.to_i.between?(1, available_models.length)
+           available_models[choice.to_i - 1]
+         elsif choice.to_i == available_models.length + 1
+           print 'Enter custom model ID: '
+           custom_model = $stdin.gets.chomp
+           custom_model.empty? ? nil : custom_model
+         else
+           puts 'Invalid selection.'
+           nil
+         end
+       else
+         # Fallback for unknown providers
+         print 'Enter model ID: '
+         model_id = $stdin.gets.chomp
+
+         if model_id.empty?
+           puts 'Model ID is required.'
+           return nil
+         end
+
+         model_id
+       end
+     end
+
+     def set_default_configuration
+       configs = Askcii::Config.configurations
+
+       if configs.empty?
+         puts 'No configurations available to set as default.'
+         return
+       end
+
+       print 'Enter configuration ID to set as default: '
+       new_default = $stdin.gets.chomp
+
+       if configs.any? { |c| c['id'] == new_default }
+         Askcii::Config.set_default_configuration(new_default)
+         puts "Configuration #{new_default} set as default."
+       else
+         puts 'Invalid configuration ID.'
+       end
+     end
+
+     def delete_configuration
+       configs = Askcii::Config.configurations
+
+       if configs.empty?
+         puts 'No configurations available to delete.'
+         return
+       end
+
+       print 'Enter configuration ID to delete: '
+       delete_id = $stdin.gets.chomp
+
+       if configs.any? { |c| c['id'] == delete_id }
+         if Askcii::Config.delete_configuration(delete_id)
+           puts "Configuration #{delete_id} deleted successfully."
+         else
+           puts 'Failed to delete configuration.'
+         end
+       else
+         puts 'Invalid configuration ID.'
+       end
+     end
+   end
+ end
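
The configuration manager added above is purely interactive: its only public entry point is run, which prints the stored configurations and then reads menu choices from $stdin. A sketch of invoking it (hypothetical, assuming the gem and its Config model are loaded):

  Askcii::ConfigurationManager.new.run
  # Configuration Management
  # ======================
  # No configurations found.
  # Options:
  #  1. Add new configuration
  #  ...
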
@@ -5,9 +5,12 @@ module Askcii
      one_to_many :messages, class: 'Askcii::Message', key: :chat_id

      def to_llm
+       current_config = Askcii::Config.current_configuration
+       provider_symbol = current_config['provider'] ? current_config['provider'].to_sym : :openai
+
        @chat = RubyLLM.chat(
          model: model_id,
-         provider: :openai,
+         provider: provider_symbol,
          assume_model_exists: true
        )
        messages.each do |msg|
@@ -1,5 +1,7 @@
  # frozen_string_literal: true

+ require 'json'
+
  module Askcii
    class Config < Sequel::Model(Askcii.database[:configs])
      def self.set(key, value)
@@ -9,9 +11,10 @@ module Askcii

      def self.get(key)
        config = find(key: key)
-       config ? config.value : nil
+       config&.value
      end

+     # Legacy methods for backward compatibility
      def self.api_key
        get('api_key')
      end
@@ -23,5 +26,89 @@ module Askcii
      def self.model_id
        get('model_id')
      end
+
+     # New multi-configuration methods
+     def self.configurations
+       where(Sequel.like(:key, 'config_%')).map do |config|
+         config_data = JSON.parse(config.value)
+         config_data.merge('id' => config.key.split('_', 2)[1])
+       end
+     rescue JSON::ParserError
+       []
+     end
+
+     def self.add_configuration(name, api_key, api_endpoint, model_id, provider)
+       config_data = {
+         'name' => name,
+         'api_key' => api_key,
+         'api_endpoint' => api_endpoint,
+         'model_id' => model_id,
+         'provider' => provider
+       }
+
+       # Find the next available ID
+       existing_ids = configurations.map { |c| c['id'].to_i }.sort
+       next_id = existing_ids.empty? ? 1 : existing_ids.last + 1
+
+       set("config_#{next_id}", config_data.to_json)
+     end
+
+     def self.get_configuration(id)
+       config = get("config_#{id}")
+       return nil unless config
+
+       JSON.parse(config)
+     rescue JSON::ParserError
+       nil
+     end
+
+     def self.default_configuration_id
+       get('default_config_id') || '1'
+     end
+
+     def self.set_default_configuration(id)
+       set('default_config_id', id.to_s)
+     end
+
+     def self.delete_configuration(id)
+       config = find(key: "config_#{id}")
+       return false unless config
+
+       # Check if this is the default configuration
+       if default_configuration_id == id.to_s
+         # Reset default to the first remaining configuration
+         remaining_configs = configurations.reject { |c| c['id'] == id.to_s }
+         if remaining_configs.any?
+           set_default_configuration(remaining_configs.first['id'])
+         else
+           # If no configurations remain, clear the default
+           config_record = find(key: 'default_config_id')
+           config_record&.delete
+         end
+       end
+
+       config.delete
+       true
+     end
+
+     def self.current_configuration
+       default_id = default_configuration_id
+       config = get_configuration(default_id)
+
+       # Fallback to legacy configuration if no multi-configs exist
+       if config.nil? && configurations.empty?
+         {
+           'api_key' => api_key,
+           'api_endpoint' => api_endpoint,
+           'model_id' => model_id,
+           'provider' => 'openai'
+         }
+       else
+         # Ensure provider is set for backward compatibility
+         config ||= {}
+         config['provider'] ||= 'openai'
+         config
+       end
+     end
    end
  end
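
The new multi-configuration methods store each configuration as a JSON blob under a config_<id> key and track the default separately under default_config_id. A sketch of how they compose, starting from an empty configs table (hypothetical values):

  Askcii::Config.add_configuration('work', 'sk-xxxx', 'https://api.openai.com/v1', 'gpt-4o', 'openai')
  Askcii::Config.configurations.map { |c| c['id'] }  # => ["1"]
  Askcii::Config.set_default_configuration('1')
  Askcii::Config.current_configuration['model_id']   # => "gpt-4o"
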
@@ -7,7 +7,7 @@ module Askcii
      def to_llm
        RubyLLM::Message.new(
          role: role.to_sym,
-         content: content,
+         content: content.to_s.encode('UTF-8', undef: :replace),
          tool_calls: {},
          tool_call_id: nil,
          input_tokens: input_tokens,
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Askcii
-   VERSION = '0.1.0'
+   VERSION = '0.3.0'
  end
data/lib/askcii.rb CHANGED
@@ -3,7 +3,6 @@
  require 'sequel'
  require 'fileutils'
  require 'ruby_llm'
- require 'ruby_llm/model_info'
  require_relative './askcii/version'

  module Askcii
@@ -22,46 +21,75 @@ module Askcii

    # Initialize the database
    def self.setup_database
-     database.create_table :chats do
-       primary_key :id
-       String :model_id, null: true
-       String :context, null: true
-       Datetime :created_at, null: false, default: Sequel::CURRENT_TIMESTAMP
-     end unless database.table_exists?(:chats)
+     unless database.table_exists?(:chats)
+       database.create_table :chats do
+         primary_key :id
+         String :model_id, null: true
+         String :context, null: true
+         Datetime :created_at, null: false, default: Sequel::CURRENT_TIMESTAMP
+       end
+     end

-     database.create_table :messages do
-       primary_key :id
-       foreign_key :chat_id, :chats, null: false
-       String :role, null: true
-       Text :content, null: true
-       String :model_id, null: true
-       Integer :input_tokens, null: true
-       Integer :output_tokens, null: true
-       Datetime :created_at, null: false, default: Sequel::CURRENT_TIMESTAMP
-     end unless database.table_exists?(:messages)
+     unless database.table_exists?(:messages)
+       database.create_table :messages do
+         primary_key :id
+         foreign_key :chat_id, :chats, null: false
+         String :role, null: true
+         Text :content, null: true
+         String :model_id, null: true
+         Integer :input_tokens, null: true
+         Integer :output_tokens, null: true
+         Datetime :created_at, null: false, default: Sequel::CURRENT_TIMESTAMP
+       end
+     end
+
+     return if database.table_exists?(:configs)

      database.create_table :configs do
        primary_key :id
        String :key, null: false, unique: true
        Text :value, null: true
-     end unless database.table_exists?(:configs)
+     end
    end

-   def self.configure_llm
+   def self.configure_llm(selected_config = nil)
      RubyLLM.configure do |config|
        config.log_file = '/dev/null'

-       # Try to get configuration from the database first, then fallback to ENV variables
-       config.openai_api_key = begin
-         Askcii::Config.api_key || ENV['ASKCII_API_KEY'] || 'blank'
-       rescue StandardError
-         ENV['ASKCII_API_KEY'] || 'blank'
-       end
+       if selected_config
+         provider = selected_config['provider'] || 'openai'
+         api_key = selected_config['api_key']

-       config.openai_api_base = begin
-         Askcii::Config.api_endpoint || ENV['ASKCII_API_ENDPOINT'] || 'http://localhost:11434/v1'
-       rescue StandardError
-         ENV['ASKCII_API_ENDPOINT'] || 'http://localhost:11434/v1'
+         # Set the appropriate API key based on provider
+         case provider.downcase
+         when 'openai'
+           config.openai_api_key = api_key || 'blank'
+           config.openai_api_base = selected_config['api_endpoint'] || 'https://api.openai.com/v1'
+         when 'anthropic'
+           config.anthropic_api_key = api_key || 'blank'
+         when 'gemini'
+           config.gemini_api_key = api_key || 'blank'
+         when 'deepseek'
+           config.deepseek_api_key = api_key || 'blank'
+         when 'openrouter'
+           config.openrouter_api_key = api_key || 'blank'
+         when 'ollama'
+           # Ollama doesn't need an API key
+           config.openai_api_base = selected_config['api_endpoint'] || 'http://localhost:11434/v1'
+         end
+       else
+         # Legacy configuration fallback
+         config.openai_api_key = begin
+           Askcii::Config.api_key || ENV['ASKCII_API_KEY'] || 'blank'
+         rescue StandardError
+           ENV['ASKCII_API_KEY'] || 'blank'
+         end
+
+         config.openai_api_base = begin
+           Askcii::Config.api_endpoint || ENV['ASKCII_API_ENDPOINT'] || 'http://localhost:11434/v1'
+         rescue StandardError
+           ENV['ASKCII_API_ENDPOINT'] || 'http://localhost:11434/v1'
+         end
        end
      end
    end
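
configure_llm now accepts an optional configuration hash and sets the provider-specific RubyLLM credentials from it, falling back to the legacy single-key behavior when called with no argument. A sketch of the expected call site (hypothetical; the Application class that does this wiring is not part of this diff, and cli stands for the CLI instance shown earlier):

  selected = Askcii::Config.get_configuration(cli.model_config_id) ||
             Askcii::Config.current_configuration
  Askcii.configure_llm(selected)
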
@@ -71,4 +99,8 @@ module Askcii
      require_relative './askcii/models/message'
      require_relative './askcii/models/config'
    end
+
+   def self.require_application
+     require_relative './askcii/application'
+   end
  end