openai-term 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/bin/openai +7 -8
  3. metadata +5 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 6662b3d556ef03f4e07a6f5f853f5f62949af51ab1fb38603162ef51714503c0
-  data.tar.gz: 64ac5a5907cb5e4df0fdfbf20919419612656017498308f25dabb32a31cef58a
+  metadata.gz: 56441d49f93ea1425b4285c4c18e5f6e293c79f49f19ffa6120906a53b5d07f1
+  data.tar.gz: 408cfec628544d09ace443aa5e67e998430d8e7acefe060efcb30e8a25e470df
 SHA512:
-  metadata.gz: caa12a0dc09e98f85b2794973c54d822c3e27891f4bfec5f5903a42a892d0580f9105cffc13bf777dd3247ceb090cf026f0da7e457258b8f2de95bfd98872623
-  data.tar.gz: 8f6eafc50d25b642e4a9fac1dc7f2f5531372838818ff23c61c5eefe995c70d99b0a6217400107a77945a04aa9fe3129984dae61553c66239a057385f1438d46
+  metadata.gz: 015c9d87cc89c8f3c913ac69349fe88407bad49e64f32e16fa9b3d39ec001ca9e3dfbbabecb69a6ca7691e63295064f13a5cca8116f7c3ef57f7c1f2b51af655
+  data.tar.gz: 6bec4745951211d8d623a6c43d57b146048b9c700780e08c7857fe38ab050a73f1b6660a012d0408a62f1c1dd823b041dada9473e9838939a4de87d47594069d
data/bin/openai CHANGED
@@ -13,13 +13,8 @@ require "ruby/openai"
 
 def model
   c = %w(text-davinci-003 code-cushman-001 text-curie-001 text-ada-001)
-  m = @prompt.select("What AI model do you want to use?") do |menu|
-    menu.choice c[0], 0
-    menu.choice c[1], 1
-    menu.choice c[2], 2
-    menu.choice c[3], 3
-  end
-  return c[m]
+  m = @prompt.select("What AI model do you want to use? (see https://beta.openai.com/docs/models/codex for details)", c, cycle: true)
+  return m
 end
 
 # HANDLE COMMAND LINE OPTIONS
@@ -60,7 +55,11 @@ end
 # REQUEST RESPONSE
 client = OpenAI::Client.new(access_token: @ai)
 
-response = client.completions( parameters: { model: @m, prompt: @q, max_tokens: @x })
+begin
+  response = client.completions( parameters: { model: @m, prompt: @q, max_tokens: @x })
+rescue => error
+  p error
+end
 
 #PRINT RESPONSE
 begin
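
The two changes to data/bin/openai are a simplified model menu and a guarded API request. Below is a minimal standalone sketch of both, assuming @prompt is a TTY::Prompt instance (the menu.choice block API in the old code matches tty-prompt, and the "~> 0.23" requirement in the metadata fits its version line) and that the access token comes from an environment variable rather than the script's own handling of @ai:

require "tty-prompt"
require "ruby/openai"

prompt = TTY::Prompt.new
models = %w(text-davinci-003 code-cushman-001 text-curie-001 text-ada-001)

# Passing the choices array straight to select returns the chosen string,
# so the old index lookup (return c[m]) is no longer needed; cycle: true
# wraps the highlight around past the last entry.
model = prompt.select("What AI model do you want to use?", models, cycle: true)

# The request is now wrapped in begin/rescue so an API or network error is
# printed instead of aborting the script with a backtrace.
# ENV["OPENAI_API_KEY"] stands in for the script's own token variable (@ai).
client = OpenAI::Client.new(access_token: ENV["OPENAI_API_KEY"])
begin
  response = client.completions(parameters: { model: model, prompt: "Say hello", max_tokens: 64 })
rescue => error
  p error
end
p response unless response.nil?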
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: openai-term
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.4
 platform: ruby
 authors:
 - Geir Isene
@@ -38,10 +38,11 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '0.23'
-description: This is a pretty straight forward interface to OpenAI with the option
+description: 'This is a pretty straight forward interface to OpenAI with the option
   to select the AI model and the maximum token length (number of maximum words in
-  the AI's response). You will use the -t option to supply the query to OpenAI or
-  the -f option to read the query from a text file instead.
+  the AI''s response). You will use the -t option to supply the query to OpenAI or
+  the -f option to read the query from a text file instead. New in 0.1.4: Better menu
+  for choosing AI model (-m option).'
 email: g@isene.com
 executables:
 - openai