openai-term 0.1.4 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/openai +15 -10
- metadata +5 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cc2ef0acdef82bc39ba989179889cbeda29564a8e2d01b23f39ce1e502f43929
+  data.tar.gz: 95691aa7a8fe4a5c20973d2421fd82e0f64de72b81aa1af6eddb189e2d2a89b6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4d297e98de6a0b0f124dccd1d9f0db9a71c80d271a1dc3dc7eb336ef82539be668cb878e9cf91736d6083f6cefeb89141e5bc34d118708fa6abe998c1272c9f4
+  data.tar.gz: 4e5366d42797d2d6cba8357a0b1fbf067d469b3579d457675f2b99ce3590ee7b914f77885a5225562047cc7b996cb0363f108030f4a5a61c782fcaa4f3e97580
data/bin/openai CHANGED
@@ -28,6 +28,7 @@ optparse = OptionParser.new do |opts|
   opts.on('-t', '--text text', 'The text to process') { |t| @t = t }
   opts.on('-x', '--max max_tokens', 'Specify max number of words in response') { |x| @x = x.to_i }
   opts.on('-m', '--model', 'The AI model to use (default = text-davinci-003i)') { @m = model }
+  opts.on('-i', '--image', 'Create an image with the text supplied by -t or -f') { @i = true }
   opts.on('-h', 'Display SHORT help text') { puts opts; exit }
   opts.on('-v', '--version', 'Display the version number') { puts "Version: 0.1"; exit }
 end
@@ -52,21 +53,25 @@ else
   exit
 end
 
-# REQUEST RESPONSE
+# REQUEST AND PRINT RESPONSE
 client = OpenAI::Client.new(access_token: @ai)
 
 begin
-
+  if @i
+    response = client.images.generate(parameters: { prompt: @q })
+    puts "Image url:"
+    puts response.dig("data", 0, "url")
+  else
+    response = client.completions( parameters: { model: @m, prompt: @q, max_tokens: @x })
+    begin
+      output = response["choices"][0]["text"]
+    rescue
+      output = "No OpenAI response"
+    end
+    puts output.strip + "\n\n"
+  end
 rescue => error
   p error
 end
 
-#PRINT RESPONSE
-begin
-  output = response["choices"][0]["text"]
-rescue
-  output = "No OpenAI response"
-end
-puts output.strip + "\n\n"
-
 # vim: set sw=2 sts=2 et ft=ruby fdm=syntax fdn=2 fcs=fold\:\ :
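For context, the 1.0.0 change above routes the request through an if/else: the new -i flag calls the ruby-openai image endpoint and prints the returned URL, while the old completion path moves into the else branch. The following is a minimal standalone sketch of that flow, not the gem's actual script; the API key source (an OPENAI_API_KEY environment variable), the hard-coded prompt, model, and token count are all illustrative stand-ins for the values the gem collects via its own options.

  #!/usr/bin/env ruby
  # Sketch of the 1.0.0 request flow shown in the diff above (illustrative only).
  require 'openai'

  client = OpenAI::Client.new(access_token: ENV['OPENAI_API_KEY'])  # assumption: key from env
  prompt = "A lighthouse at sunset"                                 # stands in for the -t/-f text

  begin
    if ARGV.include?('-i')
      # Image branch: request an image and print the URL the API returns
      response = client.images.generate(parameters: { prompt: prompt })
      puts "Image url:"
      puts response.dig("data", 0, "url")
    else
      # Text branch: request a completion and print the first choice
      response = client.completions(
        parameters: { model: "text-davinci-003", prompt: prompt, max_tokens: 100 })
      output = response["choices"][0]["text"] rescue "No OpenAI response"
      puts output.strip + "\n\n"
    end
  rescue => error
    p error
  end

Note that both branches sit inside one begin/rescue, so a network or API error in either path is reported the same way, which is the main structural difference from 0.1.4.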
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openai-term
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 1.0.0
 platform: ruby
 authors:
 - Geir Isene
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-01-
+date: 2023-01-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -41,8 +41,9 @@ dependencies:
 description: 'This is a pretty straight forward interface to OpenAI with the option
   to select the AI model and the maximum token length (number of maximum words in
   the AI''s response). You will use the -t option to supply the query to OpenAI or
-  the -f option to read the query from a text file instead. New in 0.
-
+  the -f option to read the query from a text file instead. New in 1.0.0: Added the
+  functionality to create images via the -i option (in conjunction with either the
+  -t or the -f option).'
 email: g@isene.com
 executables:
 - openai