openai-please 0.1.2 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.rubocop.yml +8 -1
- data/Gemfile.lock +9 -13
- data/README.md +49 -3
- data/docker-compose.yml +1 -0
- data/lib/please/cli.rb +27 -4
- data/lib/please/context.rb +59 -56
- data/lib/please/openai/codex_service.rb +2 -4
- data/lib/please/request.rb +2 -2
- data/lib/please/version.rb +1 -1
- data/please.gemspec +2 -2
- metadata +9 -9
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f57df851a98cb38c098cae4762fb1174ceb8a43e6bae36104c1dcc479fbcedb9
+  data.tar.gz: 23e3002663ae954bfd109500a7bedde579a5f28add34fb94abfdf114a48dbe92
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fa0e870e7d6ef5cb147ea92b96ecaf6b5dae6c4c138732dfcca90a2af5646be62fdc1aa59931aa19c5d1f4197616116d223f218e4bd4df190a4bd41f5e30cb44
+  data.tar.gz: 32b6ebf0c3fa2f5228dfd2a97ff019e01984d59f92e27d1f50fafbaab53b841c00be4b4678d1caf87983a8b00a3770d4488a7746aecf3eeeac92936680c71a8e
data/.gitignore
CHANGED
data/.rubocop.yml
CHANGED
@@ -19,8 +19,15 @@ Style/TrailingCommaInArguments:
   Enabled: true
   EnforcedStyleForMultiline: comma

+Style/TrailingCommaInArrayLiteral:
+  Enabled: true
+  EnforcedStyleForMultiline: comma
+
 Style/Documentation:
   Enabled: false

+Layout/MultilineMethodCallIndentation:
+  Enabled: false
+
 Layout/LineLength:
-
+  Enabled: false
data/Gemfile.lock
CHANGED
@@ -1,21 +1,18 @@
 PATH
   remote: .
   specs:
-    openai-please (0.1.2)
-      ruby-openai (
-      tty-prompt (
+    openai-please (0.2.0)
+      ruby-openai (~> 3.0)
+      tty-prompt (~> 0.23)

 GEM
   remote: https://rubygems.org/
   specs:
     ast (2.4.2)
-
-
-      mime-types (~> 3.0)
+    httparty (0.21.0)
+      mini_mime (>= 1.0.0)
       multi_xml (>= 0.5.2)
-
-      mime-types-data (~> 3.2015)
-    mime-types-data (3.2021.0901)
+    mini_mime (1.1.2)
     minitest (5.14.4)
     multi_xml (0.6.0)
     parallel (1.21.0)
@@ -38,9 +35,8 @@ GEM
       unicode-display_width (>= 1.4.0, < 3.0)
     rubocop-ast (1.11.0)
       parser (>= 3.0.1.1)
-    ruby-openai (
-
-      httparty (~> 0.18.1)
+    ruby-openai (3.0.3)
+      httparty (>= 0.18.1, < 0.22.0)
     ruby-progressbar (1.11.0)
     tty-color (0.6.0)
     tty-cursor (0.7.1)
@@ -65,4 +61,4 @@ DEPENDENCIES
   rubocop (~> 1.7)

 BUNDLED WITH
-   2.
+   2.4.3
data/README.md
CHANGED
@@ -10,7 +10,7 @@ Convert natural language to bash commands using OpenAI Codex

 $ gem install openai-please

-
+Provide your [OpenAI API key](https://help.openai.com/en/articles/5480100-how-do-i-gain-access-to-openai-codex) in an environment variable, or using the [configuration file](#configuration).

 ```
 OPENAI_ACCESS_TOKEN=[YOUR ACCESS TOKEN HERE]
@@ -18,20 +18,66 @@ OPENAI_ACCESS_TOKEN=[YOUR ACCESS TOKEN HERE]

 ## Usage

+Type `please`, followed by an instruction written in natural language.
+
 ```shell
 user@host:~$ please find all files larger than 1 mb
 $ find . -type f -size +1M
 Run the command? (enter "h" for help) [y,n,e,h]
 ```

-You can [e]dit the command before running it by pressing the 'e' key. This uses the
+You can [e]dit the command before running it by pressing the 'e' key. This uses the command specified in the `$EDITOR` variable, or `vi` if no editor is set.
+
+See `please --help` for more information.
+
+## Configuration
+
+You can modify the default configuration in `~/.config/please/config.yml`.
+
+```yaml
+send_pwd: false # default: true
+send_ls: false # default: true
+send_uname: true # default: true
+
+access_token: ... # default: $OPENAI_ACCESS_TOKEN
+
+examples: # default: []
+  - instruction: Run my super secret command
+    command: /super/secret/command
+
+  - instruction: Show git status
+    command: git status
+    execute: true # default: false
+
+skip_default_examples: true # default: false
+```
+
+### `examples`
+
+Any examples listed here will be added to the prompt in the following form.
+
+```
+# INSTRUCTION
+$ COMMAND
+[RESULT, if execute is set to true]
+```
+
+If `execute` is set to true, the command will be executed prior to sending the request and the result will be included in the prompt. This is useful for providing dynamic context which the AI can use to inform completions.
+
+### `skip_default_examples`
+
+Do not include the default set of examples in the prompt. You can see the full prompt by running `please --show-prompt`.
+
+*Note:* This option does not automatically imply `send_*: false`. To remove all examples from the prompt other than those explicitly specified, use this option in combination with the `send_*: false` options.

 ## Privacy

-
+By default, the result of each of the following commands is sent to OpenAI Codex to improve the relevance of completions.

 - `pwd`
 - `uname -a`
 - `ls -a`

+This behaviour can be disabled using the configuration options described above. You can review the prompt before sending it to OpenAI by running `please --show-prompt`.
+
 See [OpenAI's privacy policy](https://beta.openai.com/policies/privacy-policy) for more information.
data/docker-compose.yml
CHANGED
data/lib/please/cli.rb
CHANGED
@@ -2,10 +2,14 @@

 require 'English'
 require 'tty-prompt'
+require 'yaml'
 require 'optparse'
 require 'please'
 require 'tempfile'

+USAGE = 'Usage: please [options] <instruction>'
+CONFIG_FILE_PATH = File.expand_path('~/.config/please/config.yml')
+
 begin
   tty_prompt = TTY::Prompt.new

@@ -14,13 +18,27 @@ begin
     send_pwd: true,
     send_uname: true,
     send_ls: true,
+    access_token: ENV.fetch('OPENAI_ACCESS_TOKEN', nil),
+    examples: [],
+    skip_default_examples: false,
   }

-
+  if File.exist?(CONFIG_FILE_PATH)
+    begin
+      options.merge! YAML.load_file(CONFIG_FILE_PATH).transform_keys(&:to_sym)
+      options[:examples].each { |example| example.transform_keys!(&:to_sym) }
+    rescue StandardError
+      tty_prompt.warn 'Could not parse config file. Ignoring.'
+    end
+  end

   OptionParser.new do |opts|
     opts.banner = USAGE

+    opts.on('--show-config-path', 'Output the location of the config file, and then exit') do |v|
+      options[:show_config_path] = v
+    end
+
     opts.on('--show-prompt', 'Output the prompt that would ordinarily be sent to OpenAI Codex, and then exit') do |v|
       options[:show_prompt] = v
     end
@@ -38,12 +56,17 @@ begin
     end
   end.parse!

-
-  tty_prompt.
+  if options[:show_config_path]
+    tty_prompt.say CONFIG_FILE_PATH
+    exit
+  end
+
+  if options[:access_token].nil?
+    tty_prompt.error "Access token not found. Set it in #{CONFIG_FILE_PATH} or $OPENAI_ACCESS_TOKEN."
     exit 1
   end

-  codex_service = Please::OpenAI::CodexService.new(access_token: access_token)
+  codex_service = Please::OpenAI::CodexService.new(access_token: options[:access_token])

   instruction = ARGV.join(' ')

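The new config handling above layers a YAML file over the built-in defaults and symbolises its keys before use. A minimal sketch of that merge, outside the CLI, where the inline YAML string stands in for a hypothetical `~/.config/please/config.yml`:

```ruby
require 'yaml'

# Built-in defaults, as set up in cli.rb.
defaults = {
  send_pwd: true,
  send_uname: true,
  send_ls: true,
  access_token: ENV.fetch('OPENAI_ACCESS_TOKEN', nil),
  examples: [],
  skip_default_examples: false,
}

# Hypothetical config file contents; keys arrive as strings from YAML.
config = YAML.safe_load("send_pwd: false\naccess_token: sk-example")

options = defaults.merge(config.transform_keys(&:to_sym))
# => send_pwd is now false and access_token comes from the file;
#    every other default is left untouched.
```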
data/lib/please/context.rb
CHANGED
@@ -1,30 +1,69 @@
 # frozen_string_literal: true

+DEFAULT_EXAMPLES = [
+  {
+    instruction: 'Find all files older than 1 week and open each of them in vim',
+    command: 'find . -type f -mtime +7 -exec vim {} \;',
+  },
+
+  {
+    instruction: 'Show a clock which updates every second on a single line',
+    command: 'printf \'import time\nwhile True:\n\tprint(time.strftime("%%H:%%M:%%S"), end="\\\\r")\n\ttime.sleep(1)\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
+  },
+
+  {
+    instruction: 'Write a python program that prints out hello world with each letter on its own line',
+    command: 'printf \'print("\\\\n".join(list("Hello World"))\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
+  },
+
+  {
+    instruction: 'Read from stdin until EOF, and then output the length of the string',
+    command: 'printf \'import sys\nprint(len(sys.stdin.read().strip()))\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
+  },
+
+  {
+    instruction: 'Run the fortune command 5 times',
+    command: 'for i in $(seq 5); do fortune; done',
+  },
+
+  {
+    instruction: 'Repeadedly read a single line from the user, reverse it, and print it back',
+    command: 'printf \'while True:\n\tline = input()\n\tprint(line[::-1])\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
+  },
+].freeze
+
+OPTIONAL_EXAMPLES = {
+  pwd: {
+    instruction: 'Print the current working directory',
+    command: 'pwd',
+    execute: true,
+  },
+
+  uname: {
+    instruction: 'Show information about the operating system',
+    command: 'uname -a',
+    execute: true,
+  },
+
+  ls: {
+    instruction: 'List all files in the current directory',
+    command: 'ls -a',
+    execute: true,
+  },
+}.freeze
+
 module Please
   class Context
     def initialize(options)
-      @examples =
+      @examples = []

-
-      @examples << {
-        instruction: 'Print the current working directory',
-        command: 'pwd',
-      }
-      end
+      @examples += DEFAULT_EXAMPLES unless options[:skip_default_examples]

-      if options[:
-
-
-        command: 'uname -a',
-      }
-      end
+      @examples << OPTIONAL_EXAMPLES[:pwd] if options[:send_pwd]
+      @examples << OPTIONAL_EXAMPLES[:uname] if options[:send_uname]
+      @examples << OPTIONAL_EXAMPLES[:ls] if options[:send_ls]

-
-      @examples << {
-        instruction: 'List all files in the current directory',
-        command: 'ls -a',
-      }
-      end
+      @examples += options[:examples]
     end

     def to_s
@@ -32,45 +71,9 @@ module Please
        <<~EXAMPLE.chomp
          # #{example[:instruction]}
          $ #{example[:command]}
-         #{example
+         #{example[:execute] ? `#{example[:command]}` : ""}
        EXAMPLE
      end.join("\n")
    end
-
-    private
-
-    def default_examples
-      [
-        {
-          instruction: 'Find all files older than 1 week and open each of them in vim',
-          command: 'find . -type f -mtime +7 -exec vim {} \;',
-          result: '',
-        },
-
-        {
-          instruction: 'Download a random dog picture',
-          command: 'printf \'import urllib.request\nimport json\nimport subprocess\ndata = urllib.request.urlopen("https://dog.ceo/api/breeds/image/random").read()\nurl = json.loads(data)["message"]\nsubprocess.call(["curl", url, "-o", "dog.jpg"])\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
-          result: '',
-        },
-
-        {
-          instruction: 'Read from stdin until EOF, and then output the length of the string',
-          command: 'printf \'import sys\nprint(len(sys.stdin.read().strip()))\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
-          result: '',
-        },
-
-        {
-          instruction: 'Run the fortune command 5 times',
-          command: 'for i in $(seq 5); do fortune; done',
-          result: '',
-        },
-
-        {
-          instruction: 'Repeadedly read a single line from the user, reverse it, and print it back',
-          command: 'printf \'while True:\n\tline = input()\n\tprint(line[::-1])\' > /tmp/program.py; python3 /tmp/program.py; rm /tmp/program.py',
-          result: '',
-        },
-      ]
-    end
   end
 end
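The ternary added to `to_s` above means an example flagged with `execute: true` is actually run locally (via backticks) and its output embedded in the prompt, matching the `# INSTRUCTION / $ COMMAND / RESULT` form described in the README. A small sketch of that rendering with an illustrative example hash:

```ruby
example = {
  instruction: 'Print the current working directory',
  command: 'pwd',
  execute: true,
}

rendered = <<~EXAMPLE.chomp
  # #{example[:instruction]}
  $ #{example[:command]}
  #{example[:execute] ? `#{example[:command]}` : ""}
EXAMPLE

puts rendered
# # Print the current working directory
# $ pwd
# <output of actually running the command>
```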
data/lib/please/openai/codex_service.rb
CHANGED
@@ -9,10 +9,7 @@ module Please
       client = ::OpenAI::Client.new(access_token: access_token)

       response = client.completions(
-
-        parameters: default_parameters.merge(
-          prompt: prompt,
-        ),
+        parameters: default_parameters.merge(prompt: prompt),
       )

       response.parsed_response.fetch('choices').first.fetch('text')
@@ -22,6 +19,7 @@ module Please

     def default_parameters
       {
+        model: 'code-davinci-002',
         temperature: 0,
         max_tokens: 512,
         top_p: 1,
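With the `model` parameter now pinned in `default_parameters`, the request the service builds is roughly equivalent to the following standalone call against the ruby-openai 3.x client; the prompt text here is illustrative:

```ruby
require 'openai' # ruby-openai ~> 3.0

client = OpenAI::Client.new(access_token: ENV.fetch('OPENAI_ACCESS_TOKEN'))

response = client.completions(
  parameters: {
    model: 'code-davinci-002',
    prompt: "Write a one-line bash command for each of the following tasks.\n\n# find all files larger than 1 mb\n$",
    temperature: 0,
    max_tokens: 512,
    top_p: 1,
  },
)

# As in codex_service.rb, the completion text sits in the first choice.
puts response.parsed_response.fetch('choices').first.fetch('text')
```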
data/lib/please/request.rb
CHANGED
@@ -8,7 +8,7 @@ module Please
         # Collapse multiline commands into one line
         .gsub(/\s*\\\n\s*/, ' ')
         # Remove subsequent lines that do not contain commands
-        .gsub(/\n[
+        .gsub(/\n[^$][^\n]*$/, '')
         # Collapse multiple commands into one line
         .gsub(/\n\$ /, '; ')
         # Remove multiple consecutive spaces
@@ -19,7 +19,7 @@ module Please
       <<~PROMPT.chomp
         Write a one-line bash command for each of the following tasks.

-        #{context
+        #{context}

         # #{instruction.gsub(/\n/, " ")}
         $
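The tightened regex above anchors on trailing lines that do not begin with `$`, so prose after the command is dropped from the completion. A quick sketch of the clean-up chain from request.rb applied to a hypothetical completion; the final `strip` is only for display and is not part of the gem:

```ruby
completion = " find . -type f \\\n  -size +1M\n# prints every file over 1 MB"

command = completion
          .gsub(/\s*\\\n\s*/, ' ')   # collapse backslash-continued lines
          .gsub(/\n[^$][^\n]*$/, '') # drop a trailing line that is not a command
          .gsub(/\n\$ /, '; ')       # join any further "$ cmd" lines with "; "
          .strip                     # illustrative only

puts command # => "find . -type f -size +1M"
```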
data/lib/please/version.rb
CHANGED
data/please.gemspec
CHANGED
@@ -26,8 +26,8 @@ Gem::Specification.new do |spec|
   spec.require_paths = ['lib']

   # Uncomment to register a new dependency of your gem
-  spec.add_dependency 'ruby-openai', '
-  spec.add_dependency 'tty-prompt', '
+  spec.add_dependency 'ruby-openai', '~> 3.0'
+  spec.add_dependency 'tty-prompt', '~> 0.23'

   # For more information and examples about making a new gem, checkout our
   # guide at: https://bundler.io/guides/creating_gem.html
metadata
CHANGED
@@ -1,41 +1,41 @@
 --- !ruby/object:Gem::Specification
 name: openai-please
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.2.0
 platform: ruby
 authors:
 - Joe Anderson
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2023-01-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.0'
 - !ruby/object:Gem::Dependency
   name: tty-prompt
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0.23'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0.23'
 description:
@@ -87,7 +87,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.4.1
 signing_key:
 specification_version: 4
 summary: Convert natural language to bash commands using OpenAI Codex