openai-term 1.2 → 1.3
- checksums.yaml +4 -4
- data/bin/openai +3 -3
- metadata +8 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8cbbff6f66b0be7142ea17264913d04d81bff65ccb7dcd907078cd20d0cecab7
+  data.tar.gz: c78fd899435b84207c0dfbd0dbcd0d7338ba3a39d9d6fe363bb5561a6c12a3ca
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7be5cf192e04651ac07ccc27761a0790db85aa14beb0106d8dab93a9483bd0155d471371eabc0a46e6f8c96d1bb019437bc6365d824536ced020a2edf76f1e6f
+  data.tar.gz: 34a9532402510e670340c8b1bef5632649368342bc2309f632fdeeb55eb6c2e877206a315e2b7381b9bbffada1c1b3c46f23a088f631a2c55289b69a2a539c8f
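If you want to check the new SHA256 values against a downloaded copy, the sketch below is one way to do it. It is not part of the gem, and it assumes you have already unpacked openai-term-1.3.gem (a .gem file is a plain tar archive) so that metadata.gz and data.tar.gz sit in the current directory:

require "digest"

# Expected SHA256 values taken from the new checksums.yaml above
expected = {
  "metadata.gz" => "8cbbff6f66b0be7142ea17264913d04d81bff65ccb7dcd907078cd20d0cecab7",
  "data.tar.gz" => "c78fd899435b84207c0dfbd0dbcd0d7338ba3a39d9d6fe363bb5561a6c12a3ca"
}

expected.each do |file, sha|
  actual = Digest::SHA256.file(file).hexdigest   # stdlib Digest, no extra gems
  puts "#{file}: #{actual == sha ? 'OK' : 'MISMATCH'}"
end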
data/bin/openai
CHANGED
@@ -8,7 +8,7 @@ require "ruby/openai"
 
 # INITIALIZE CONSTANTS
 @x = 200
-@m = "
+@m = "gpt-3.5-turbo-instruct"
 @prompt = TTY::Prompt.new
 
 def model
@@ -27,7 +27,7 @@ optparse = OptionParser.new do |opts|
   opts.on('-f', '--file textfile', 'A file to process') { |f| @f = f }
   opts.on('-t', '--text text', 'The text to process') { |t| @t = t }
   opts.on('-x', '--max max_tokens', 'Specify max number of words in response') { |x| @x = x.to_i }
-  opts.on('-m', '--model', 'The AI model to use (default =
+  opts.on('-m', '--model', 'The AI model to use (default = gpt-3.5-turbo-instruct') { @m = model }
   opts.on('-i', '--image', 'Create an image with the text supplied by -t or -f') { @i = true }
   opts.on('-h', 'Display SHORT help text') { puts opts; exit }
   opts.on('-v', '--version', 'Display the version number') { puts "Version: 0.1"; exit }
@@ -45,8 +45,8 @@ end
 
 # PROCESS QUERY
 @q = ""
-@q += File.read(@f) if @f
 @q += @t if @t
+@q += File.read(@f) if @f
 unless @f or @t
   puts "You must supply a query in form of a text file (option -f file) and/or text (option -t text)\n\n"
   exit
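Taken together, the three hunks above make two functional changes: the default model is now gpt-3.5-turbo-instruct, and text passed with -t is appended to the query before the contents of a file passed with -f (previously the file came first). As a rough sketch assembled from the hunks above (surrounding code omitted), the affected lines read like this after 1.3:

# INITIALIZE CONSTANTS: new default model
@x = 200
@m = "gpt-3.5-turbo-instruct"

# Inside the OptionParser block: -m now advertises the new default
opts.on('-m', '--model', 'The AI model to use (default = gpt-3.5-turbo-instruct') { @m = model }

# PROCESS QUERY: -t text first, then the contents of the -f file
@q = ""
@q += @t if @t
@q += File.read(@f) if @f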
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openai-term
 version: !ruby/object:Gem::Version
-  version: '1.
+  version: '1.3'
 platform: ruby
 authors:
 - Geir Isene
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2024-10-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -41,8 +41,8 @@ dependencies:
 description: 'This is a pretty straight forward interface to OpenAI with the option
   to select the AI model and the maximum token length (number of maximum words in
   the AI''s response). You will use the -t option to supply the query to OpenAI or
-  the -f option to read the query from a text file instead. New in 1.
-
+  the -f option to read the query from a text file instead. New in 1.3: Updated default
+  model to gpt-3.5-turbo-instruct.'
 email: g@isene.com
 executables:
 - openai
@@ -55,7 +55,7 @@ licenses:
 - Unlicense
 metadata:
   source_code_uri: https://github.com/isene/openai
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -70,8 +70,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.4.20
+signing_key:
 specification_version: 4
 summary: openai is a terminal interface to the OpenAI solution at beta.openai.com
 test_files: []