ruby-openai-cli 1.0.1
- checksums.yaml +7 -0
- data/README.MD +23 -0
- data/bin/ruby-openai-cli +3 -0
- data/lib/context.rb +7 -0
- data/lib/prompt.rb +140 -0
- data/lib/version.rb +1 -0
- metadata +135 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 3c5856a8ea31123a8d4e09f28f32bad0ac09a610cb82c980272c72deebc55f77
+  data.tar.gz: c8c0b098bef65ced164868f30cb9956b11238419ab28e4365bc7300bb9dc2dc8
+SHA512:
+  metadata.gz: 455d5a4b2354e8ba320ca8a964d42974f65d5bca5125170e3f2b04fda06210d2c47df11ef009f6f65389c2d2f95410b1466f666c05ed132cf26947469861d254
+  data.tar.gz: 19d6bb2e7f9cabe0ad63378bf0ab9a6bb1f36d9adc7b40e7dda25681569cc59cf091e296f83ede2ce3a2e70a22e68153e710b364fc2f714f4bda8eff69696677
data/README.MD
ADDED
@@ -0,0 +1,23 @@
+[![Gem Version](https://badge.fury.io/rb/ruby-openai-cli.svg)](https://badge.fury.io/rb/ruby-openai-cli)
+
+## Ruby-OpenAI-CLI
+
+A tool to query OpenAI's GPT APIs from the command line.
+
+### Usage
+
+To run the tool, you need an API key from OpenAI; you can get one here:
+https://platform.openai.com/account/api-keys
+You will be asked to provide it on first use of `ruby-openai-cli`, or you can
+provide it as `OPENAI_API_KEY` in your environment.
+
+To install the gem, run: `gem install ruby-openai-cli`. Then you can
+use the tool by calling `ruby-openai-cli` on your command line.
+
+![image](https://user-images.githubusercontent.com/582520/229295020-e8aa962c-6a87-45e2-802a-c8d41026a56d.png)
+
+### Development
+
+* Install dev dependencies: `gem install awesome_print byebug rubocop`
+* Build Rubygem: `gem build ruby-openai-cli.gemspec`
+* Publish Rubygem: `gem push ruby-openai-cli-1.0.1.gem`
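Putting the README's instructions together, a minimal shell session might look like the following; the `-q`, `--context`, `-c` and `-l` flags come from `lib/prompt.rb` further down, and the key value and chat name are only examples:

```console
$ gem install ruby-openai-cli
$ export OPENAI_API_KEY=sk-...   # optional: otherwise the tool asks for the key on first run
$ ruby-openai-cli -q "Explain Ruby's Array#compact in one sentence"
$ ruby-openai-cli --context ruby -c refactoring   # interactive chat, saved as 'refactoring'
$ ruby-openai-cli -l                              # list stored chats
```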
data/bin/ruby-openai-cli
ADDED
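The three lines of `bin/ruby-openai-cli` are not expanded in this listing. A plausible stub, consistent with `bindir: bin` in the metadata and with `lib/prompt.rb` being a top-level script, could look like this (hypothetical, not the published file):

```ruby
#!/usr/bin/env ruby
# Hypothetical executable stub: hand control to the top-level script in lib/.
require_relative '../lib/prompt'
```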
data/lib/context.rb
ADDED
@@ -0,0 +1,7 @@
+DEFAULT_CONTEXTS = {
+  "ruby": 'Imagine you are a senior software developer specialized in Ruby, ' \
+          'Ruby on Rails and SQL with 20 years of experience. ' \
+          'You are very proficient at writing high quality, clean, concise and highly ' \
+          'optimized code that follows the standards. You are my personal assistant. ' \
+          'You are here to help me in various tasks and answer my questions.'
+}
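`DEFAULT_CONTEXTS` is consumed by the `--context` option in `lib/prompt.rb` below: a known key resolves to its predefined system prompt, while any other value is used verbatim as the context text. A minimal standalone sketch of that lookup (context string shortened):

```ruby
# Sketch of the --context resolution performed in lib/prompt.rb (context text shortened).
DEFAULT_CONTEXTS = { "ruby": 'Imagine you are a senior software developer specialized in Ruby ...' }

def resolve_context(value)
  DEFAULT_CONTEXTS[value.to_sym] || value # known key => predefined prompt, otherwise pass through
end

puts resolve_context('ruby')                        # built-in Ruby context
puts resolve_context('You answer in short poems.')  # free-form context used as-is
```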
data/lib/prompt.rb
ADDED
@@ -0,0 +1,140 @@
+require 'openai'
+require 'dotenv/load'
+require 'optparse'
+require 'tty-markdown' # https://github.com/piotrmurach/tty-markdown
+require_relative 'context'
+require_relative 'version'
+
+CONFIG_DIR = Dir.home + '/.config/ruby-openai-cli'
+CHATS_DIR = CONFIG_DIR + '/chats'
+API_KEY_FILE = CONFIG_DIR + '/api_key'
+CONTEXTS_FILE = CONFIG_DIR + '/contexts.json'
+
+MODEL = 'gpt-3.5-turbo'
+
+options = {}
+bin_name = File.basename($0)
+OptionParser.new do |opts|
+  opts.banner = "#{bin_name}, version: #{VERSION}. Usage: #{bin_name} [options]"
+  opts.on('-q', '--question QUESTION', 'Provide question as parameter and receive answer to stdout') do |v|
+    options[:q] = v
+  end
+  opts.on('--context CONTEXT', 'Use a context') { |v| options[:context] = v }
+  opts.on('--list-contexts', 'List available contexts') { |_v| options[:lc] = true }
+  opts.on('-d', '--debug', 'Debug mode') { |_v| options[:d] = true }
+  opts.on('-c', '--chat [NAME]', 'Start an interactive conversation (saved as NAME). ' \
+          'You can continue a previous chat by providing the same name') do |name|
+    options[:chat] = true
+    FileUtils.mkdir_p(CHATS_DIR) if name
+    options[:chat_name] = name
+    options[:chat_file] = CHATS_DIR + "/#{name}.json"
+  end
+  opts.on('-l', '--list-chats', 'List stored chats') { |_v| options[:l] = true }
+end.parse!
+
+unless ENV.fetch('OPENAI_API_KEY', nil)
+  unless File.exist?(API_KEY_FILE)
+    puts 'To use the OpenAI API, you need to get API key at https://platform.openai.com/account/api-keys.'
+    puts "It will get stored at #{API_KEY_FILE}."
+    puts 'Please enter your API key:'
+    FileUtils.mkdir_p(CONFIG_DIR)
+    key = gets.chomp
+    test_response = OpenAI::Client.new(access_token: key).models.list
+    unless test_response.code == 200
+      puts test_response['error']['message']
+      exit 1
+    end
+    File.write(API_KEY_FILE, key)
+  end
+  ENV['OPENAI_API_KEY'] = File.read(API_KEY_FILE)
+end
+
+OpenAI.configure do |config|
+  config.access_token = ENV.fetch('OPENAI_API_KEY')
+  config.request_timeout = 240
+end
+
+prompt = ''
+client = OpenAI::Client.new
+
+def get_input
+  printf TTY::Markdown.parse('**Your message>**', theme: { strong: %i[yellow bold] })
+  gets.chomp
+end
+
+def format_input(input)
+  puts TTY::Markdown.parse("**Your message>**\n" + input + "\n",
+                           theme: { strong: %i[yellow bold] })
+end
+
+def format_response(response)
+  puts TTY::Markdown.parse("\n**ChatGPT response:**\n" + response + "\n\n",
+                           theme: { strong: %i[blue bold] })
+end
+
+if options[:lc]
+  puts 'Available contexts: '
+  DEFAULT_CONTEXTS.keys.each do |ctx|
+    puts ctx
+  end
+  exit
+end
+
+if options[:l]
+  out = "**Available chats:**\n\n"
+  Dir[CHATS_DIR + '/*'].each do |file|
+    out += "* '#{File.basename(file, '.json')}'\n"
+  end
+  out += "\nYou can continue a chat with: `#{bin_name} -c '<chat name>'`"
+  out += "\nYour chats are stored at `#{CHATS_DIR}`"
+  puts TTY::Markdown.parse(out)
+  exit
+end
+
+messages_history = []
+if options[:chat_name] && File.exist?(options[:chat_file])
+  messages_history = JSON.parse(File.read(options[:chat_file]))
+  messages_history.each do |message|
+    format_input(message['content']) if message['role'] == 'user'
+    format_response(message['content']) if message['role'] == 'assistant'
+  end
+end
+
+if options[:context]
+  context = DEFAULT_CONTEXTS[options[:context].to_sym] || options[:context]
+  messages_history << { role: 'system', content: context }
+end
+
+begin
+  loop do
+    prompt = options[:q]
+    prompt = get_input unless options[:q]
+    options[:q] = nil
+    messages_history << { "role": 'user', "content": prompt }
+    parameters = {
+      model: MODEL,
+      messages: messages_history,
+      temperature: 0.3, # low temperature = very high probability response (0 to 1)
+      max_tokens: 2000
+    }
+    puts "Sending: #{parameters}" if options[:d]
+    response = client.chat(parameters:)
+    puts "Received: #{response}" if options[:d]
+    begin
+      response_text = response['choices'][0]['message']['content']
+      if options[:chat]
+        format_response response_text
+      else
+        puts response_text
+      end
+      messages_history << { "role": 'assistant', "content": response_text }
+      File.write(options[:chat_file], messages_history.to_json) if options[:chat_name]
+    rescue StandardError
+      puts "Error: '#{response['error']['message']}'"
+    end
+    break unless options[:chat]
+  end
+rescue Interrupt
+  puts "Your current chat was saved as '#{options[:chat_name]}' at #{options[:chat_file]}" if options[:chat_name]
+  exit 1
+end
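A detail worth spelling out from the script above: with `-c NAME`, the whole `messages_history` array is serialized to `~/.config/ruby-openai-cli/chats/NAME.json` after each answer and replayed when the same name is given again. A small sketch of reading such a file (chat name and contents are illustrative):

```ruby
require 'json'

# Inspect a saved chat the same way lib/prompt.rb reloads it.
chat_file = File.join(Dir.home, '.config/ruby-openai-cli/chats/refactoring.json')

# The file is the messages_history array serialized with to_json:
# [{ "role" => "user", "content" => "..." }, { "role" => "assistant", "content" => "..." }, ...]
JSON.parse(File.read(chat_file)).each do |message|
  puts "#{message['role']}: #{message['content']}"
end
```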
data/lib/version.rb
ADDED
@@ -0,0 +1 @@
+VERSION = '1.0.1'
metadata
ADDED
@@ -0,0 +1,135 @@
+--- !ruby/object:Gem::Specification
+name: ruby-openai-cli
+version: !ruby/object:Gem::Version
+  version: 1.0.1
+platform: ruby
+authors:
+- Thomas Schmidt
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2023-04-01 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: ruby-openai
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.7'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '3.7'
+- !ruby/object:Gem::Dependency
+  name: optparse
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.3'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.3'
+- !ruby/object:Gem::Dependency
+  name: tty-markdown
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.7'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.7'
+- !ruby/object:Gem::Dependency
+  name: awesome_print
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.9'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.9'
+- !ruby/object:Gem::Dependency
+  name: byebug
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '11.1'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '11.1'
+- !ruby/object:Gem::Dependency
+  name: rubocop
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.48'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.48'
+description: Using ChatGPT from the command line.
+email: tom@digitalflow.de
+executables:
+- ruby-openai-cli
+extensions: []
+extra_rdoc_files: []
+files:
+- README.MD
+- bin/ruby-openai-cli
+- lib/context.rb
+- lib/prompt.rb
+- lib/version.rb
+homepage: https://github.com/digitaltom/ruby-openai-cli
+licenses:
+- MIT
+metadata:
+  homepage_uri: https://github.com/digitaltom/ruby-openai-cli
+  source_code_uri: https://github.com/digitaltom/ruby-openai-cli
+  changelog_uri: https://github.com/digitaltom/ruby-openai-cli
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubygems_version: 3.4.6
+signing_key:
+specification_version: 4
+summary: CLI interface for OpenAI ChatGPT
+test_files: []
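The gemspec itself is not part of the packaged files (only the metadata it produces is shown above). For orientation, a `ruby-openai-cli.gemspec` consistent with that metadata might look roughly like this (a sketch, not the published file):

```ruby
# Sketch of a gemspec that would produce the metadata above (not the published file).
require_relative 'lib/version'

Gem::Specification.new do |spec|
  spec.name        = 'ruby-openai-cli'
  spec.version     = VERSION
  spec.authors     = ['Thomas Schmidt']
  spec.email       = 'tom@digitalflow.de'
  spec.summary     = 'CLI interface for OpenAI ChatGPT'
  spec.description = 'Using ChatGPT from the command line.'
  spec.homepage    = 'https://github.com/digitaltom/ruby-openai-cli'
  spec.license     = 'MIT'

  spec.files         = ['README.MD', 'bin/ruby-openai-cli',
                         'lib/context.rb', 'lib/prompt.rb', 'lib/version.rb']
  spec.bindir        = 'bin'
  spec.executables   = ['ruby-openai-cli']
  spec.require_paths = ['lib']
  spec.metadata      = {
    'homepage_uri'    => 'https://github.com/digitaltom/ruby-openai-cli',
    'source_code_uri' => 'https://github.com/digitaltom/ruby-openai-cli',
    'changelog_uri'   => 'https://github.com/digitaltom/ruby-openai-cli'
  }

  spec.add_dependency 'optparse', '~> 0.3'
  spec.add_dependency 'ruby-openai', '~> 3.7'
  spec.add_dependency 'tty-markdown', '~> 0.7'

  spec.add_development_dependency 'awesome_print', '~> 1.9'
  spec.add_development_dependency 'byebug', '~> 11.1'
  spec.add_development_dependency 'rubocop', '~> 1.48'
end
```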