hyrum 0.0.1 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +38 -0
- data/README.md +2 -4
- data/lib/hyrum/generators/fake_generator.rb +1 -1
- data/lib/hyrum/generators/message_generator.rb +4 -2
- data/lib/hyrum/generators/openai_generator.rb +17 -22
- data/lib/hyrum/script_options.rb +28 -18
- data/lib/hyrum/version.rb +2 -2
- data/lib/hyrum.rb +4 -1
- metadata +5 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b7177d332d4a6a3e7a9527a906e39d4f141af5444aedbb3acff837b07b93f619
+  data.tar.gz: 9b0d67445326a57b5aa4f1bc2926cf06455609f491c75c08e65889a8d019b481
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: aa204a19e6e0080792381c09904cc0fcf013b05490875b4f793f4135b3cec0d605d544261266a6b00f54be2a30ddfef4d3fd44e2f8be17060f49924fabe528b3
+  data.tar.gz: 17ceb22c5ea1b7a3837ce9195cc4b0f8a9b4276d502e3a9b4f4450aec61f5bcb780eaf135f2d9ac92717c060a81cf1423af751ef34de54faf532850c67b024f0
data/CHANGELOG.md
ADDED
@@ -0,0 +1,38 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.1.0] - 2024-12-16
+
+### Fixed
+
+- Minor bug fixes and spec updates
+- Option defaults added in help where appropriate
+
+### Added
+
+- New -n | --number option to specify number of messages to produce
+- New ScriptOption specs
+- New MessageGenerator specs
+- New FakeGenerator specs
+
+## [0.0.2] - 2024-12-06
+
+### Fixed
+
+- Minor bug fixes
+- Option defaults
+
+### Added
+
+- New OpenAI generator specs
+
+## [0.0.1] - 2024-12-01
+
+### Added
+
+- This CHANGELOG file
+- Initial version of the project
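The headline 0.1.0 change is the new `-n`/`--number` option. A rough, hypothetical invocation through the `Hyrum.run` entry point shown later in this diff; the flag names come from the script_options.rb changes, while `fake` and `json` are assumed to be accepted `--service` and `--format` values:

```ruby
# Hypothetical usage sketch, not taken from the package itself: flag names come
# from script_options.rb in this diff; 'fake' and 'json' are assumed values.
require 'hyrum'

Hyrum.run(['-m', 'Service unavailable', '-n', '3', '-s', 'fake', '-f', 'json'])
```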
data/README.md
CHANGED
@@ -21,7 +21,8 @@ Also, we don't want to spend a lot of time writing variations of the same messag
 This is the use case Hyrum tries to solve. It uses an AI service (openai,
 ollama, etc.) to generate variations of a provided message. The generated
 variations are also formatted in the language/format of your choice (ruby,
-json, etc.).
+json, etc.). This code can then be used in your project to ensure messages are
+no longer static, improving your api design.

 ## Example

@@ -71,9 +72,6 @@ Usage: hyrum [options]
 ```

 ## Installation
-NOTE: This gem is not yet available on rubygems.org. You can install it from the
-repository with `bundle exec rake install`. In the future, you can...
-
 Install the gem and add to the application's Gemfile by executing:

     $ bundle add hyrum
data/lib/hyrum/generators/message_generator.rb
CHANGED
@@ -18,9 +18,11 @@ module Hyrum

     class MessageGenerator
       def self.create(options)
-
+        unless GENERATOR_CLASSES.key?(options[:ai_service].to_sym)
+          raise ArgumentError, "Invalid AI service: #{options[:ai_service]}"
+        end

-
+        generator_class = GENERATOR_CLASSES[options[:ai_service].to_sym]
         generator_class.new(options)
       end
     end
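The effect of the new guard in `MessageGenerator.create` is that an unrecognized service fails fast with an `ArgumentError`. A minimal sketch, using a stand-in mapping since the gem's `GENERATOR_CLASSES` constant is not shown in this diff:

```ruby
# Sketch only: StubGenerator and this mapping are stand-ins; the real
# GENERATOR_CLASSES constant is not part of this diff.
StubGenerator = Struct.new(:options)
GENERATOR_CLASSES = { fake: StubGenerator, openai: StubGenerator }.freeze

def create(options)
  unless GENERATOR_CLASSES.key?(options[:ai_service].to_sym)
    raise ArgumentError, "Invalid AI service: #{options[:ai_service]}"
  end

  GENERATOR_CLASSES[options[:ai_service].to_sym].new(options)
end

create(ai_service: 'fake')   # => #<struct StubGenerator options={:ai_service=>"fake"}>
create(ai_service: 'nope')   # raises ArgumentError: Invalid AI service: nope
```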
data/lib/hyrum/generators/openai_generator.rb
CHANGED
@@ -11,17 +11,13 @@ module Hyrum

       def initialize(options)
         @options = options
-        configure
-        @client = OpenAI::Client.new
       end

       def generate
-
-        return JSON.parse(canned_content)
-        end
+        configure

-        response =
-        puts "
+        response = chat_response
+        puts "OpenAI response: #{JSON.pretty_generate(response)}" if options[:verbose]
         content = response.dig('choices', 0, 'message', 'content')
         JSON.parse(content)
       end
@@ -30,25 +26,19 @@ module Hyrum

       def prompt
         prompt = <<~PROMPT
-          Please provide
+          Please provide <%= number %> alternative status messages for the following message:
           `<%= message %>`. The messages should be unique and informative. The messages
           should be returned as json in the format: `{ "<%= key %>": ['list', 'of', 'messages']}`
           The key should be `"<%= key %>"` followed by the list of messages.
         PROMPT
-        erb_hash = { key: options[:key], message: options[:message] }
+        erb_hash = { key: options[:key], message: options[:message], number: options[:number] }
         template = ERB.new(prompt, trim_mode: '-')
         template.result_with_hash(erb_hash)
       end

-      def
-
-
-          model: options[:ai_model],
-          response_format: { type: 'json_object' },
-          messages: [{ role: 'user', content: prompt}],
-          temperature: 0.7
-        }
-      )
+      def chat_response
+        client = OpenAI::Client.new
+        client.chat(parameters: chat_params)
       rescue OpenAI::Error => e
         puts "OpenAI::Error: #{e.message}"
         exit
@@ -58,6 +48,15 @@ module Hyrum
         exit
       end

+      def chat_params
+        {
+          model: options[:ai_model],
+          response_format: { type: 'json_object' },
+          messages: [{ role: 'user', content: prompt}],
+          temperature: 0.7
+        }
+      end
+
       def configure
         OpenAI.configure do |config|
           config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN') if options[:ai_service] == :openai
@@ -73,10 +72,6 @@ module Hyrum
         puts "Please set the OPENAI_ACCESS_TOKEN environment variable."
         exit
       end
-
-      def canned_content
-        FakeGenerator::FAKE_MESSAGES
-      end
     end
   end
 end
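For reference, the round trip `generate` now performs can be pictured with a canned response; the literal JSON below is illustrative, not real model output:

```ruby
# Illustrative stand-in for a client.chat response. The generator digs out the
# message content and parses it as JSON keyed by options[:key], which is the
# shape the prompt above asks the model to return.
require 'json'

response = {
  'choices' => [
    { 'message' => { 'content' => '{"status": ["Service is down", "Try again later"]}' } }
  ]
}

content = response.dig('choices', 0, 'message', 'content')
JSON.parse(content) # => {"status"=>["Service is down", "Try again later"]}
```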
data/lib/hyrum/script_options.rb
CHANGED
@@ -3,6 +3,8 @@
 require 'optparse'

 module Hyrum
+  class ScriptOptionsError < StandardError; end
+
   class ScriptOptions
     MANDATORY_OPTIONS = %i[message].freeze

@@ -19,22 +21,23 @@ module Hyrum
         parser.parse!(@args)
       end
       enforce_mandatory_options
+      set_dynamic_defaults
       options
     rescue OptionParser::InvalidOption => e
-
+      raise ScriptOptionsError.new("Invalid option: #{e.message}")
     rescue OptionParser::MissingArgument => e
-
+      raise ScriptOptionsError.new("Missing argument for option: #{e.message}")
     rescue OptionParser::InvalidArgument => e
-
-    ensure
-      if err
-        puts err
-        exit
-      end
+      raise ScriptOptionsError.new("Invalid argument for option: #{e.message}")
     end

     private

+    def set_dynamic_defaults
+      default_model = Generators::AI_MODEL_DEFAULTS[options[:ai_service]]
+      options[:ai_model] ||= default_model
+    end
+
     def enforce_mandatory_options
       missing = MANDATORY_OPTIONS.select { |param| options[param].nil? }
       return if missing.empty?
@@ -66,31 +69,37 @@ module Hyrum
     end

     def ai_service_options(parser)
-
+      options[:ai_service] = :fake
+
+      description = "AI service: one of #{Generators::AI_SERVICES.join(', ')} (default: fake)"
       parser.on('-s SERVICE', '--service SERVICE', Generators::AI_SERVICES, description) do |service|
         options[:ai_service] = service.to_sym
       end
-      options[:ai_service] ||= :fake

-      default_model = Generators::AI_MODEL_DEFAULTS[options[:ai_service]]
       description = 'AI model: must be a valid model for the selected service'
       parser.on('-d MODEL', '--model MODEL', description) do |model|
         options[:ai_model] = model.to_sym
       end
-      options[:ai_model] ||= default_model
     end

     def message_key_options(parser)
-
-
+      options[:key] = :status
+
+      parser.on('-k KEY', '--key KEY', 'Message key (default: status)') do |key|
+        options[:key] = key.to_sym
       end
-      options[:key] ||= 'status'
     end

     def message_options(parser)
-      parser.on('-m MESSAGE', '--message MESSAGE', 'Status message') do |message|
+      parser.on('-m MESSAGE', '--message MESSAGE', 'Status message (required)') do |message|
         options[:message] = message
       end
+
+      options[:number] = 5
+
+      parser.on('-n NUMBER', '--number NUMBER', Integer, 'Number of messages to generate (default: 5)',) do |number|
+        options[:number] = number.to_i
+      end
     end

     def verbosity_options(parser)
@@ -100,13 +109,14 @@ module Hyrum
     end

     def format_options(parser)
+      options[:format] = :text
+
       formats = Formats::FORMATS
       description = 'Output format. Supported formats are:'
       supported = formats.join(', ')
-      parser.on('-f FORMAT', '--format FORMAT', formats, description, supported) do |format|
+      parser.on('-f FORMAT', '--format FORMAT', formats, description, supported, "(default: text)") do |format|
         options[:format] = format
       end
-      options[:format] ||= :text
     end
   end
 end
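The pattern above replaces post-hoc `||=` defaults with defaults seeded before each flag is registered; only the model default, which depends on the chosen service, is resolved after parsing via `set_dynamic_defaults`. A minimal sketch of that flow, where the model names in `AI_MODEL_DEFAULTS` are assumptions:

```ruby
# Minimal sketch of the defaults flow, not the gem's full parser.
require 'optparse'

AI_MODEL_DEFAULTS = { fake: :fake_model, openai: :'gpt-4o-mini' }.freeze # assumed values

options = { ai_service: :fake, key: :status, number: 5, format: :text } # static defaults up front
OptionParser.new do |parser|
  parser.on('-s SERVICE', '--service SERVICE') { |s| options[:ai_service] = s.to_sym }
  parser.on('-d MODEL', '--model MODEL') { |m| options[:ai_model] = m.to_sym }
end.parse!(%w[-s openai])

# The model default depends on the chosen service, so it can only be filled in now.
options[:ai_model] ||= AI_MODEL_DEFAULTS[options[:ai_service]] # => :'gpt-4o-mini'
```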
data/lib/hyrum/version.rb
CHANGED
data/lib/hyrum.rb
CHANGED
@@ -8,7 +8,7 @@ loader.setup
 module Hyrum
   def self.run(args)
     options = ScriptOptions.new(args).parse
-    generator_opts = options.slice(:message, :key, :ai_service, :ai_model, :verbose)
+    generator_opts = options.slice(:message, :key, :ai_service, :ai_model, :number, :verbose)
     formatter_opts = options.slice(:format, :verbose)

     puts "Options: #{options.inspect}" if options[:verbose]
@@ -17,6 +17,9 @@ module Hyrum
     messages = message_generator.generate
     output = formatter.format(messages)
     puts output
+  rescue ScriptOptionsError => e
+    puts e.message
+    exit
   end
 end

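With the new `rescue`, a bad command line now surfaces as a printed `ScriptOptionsError` message followed by a clean exit rather than an unhandled exception. A hedged sketch of that path:

```ruby
# Hedged sketch: an unknown flag makes ScriptOptions raise ScriptOptionsError,
# which Hyrum.run rescues, prints, and turns into a normal exit.
require 'hyrum'

begin
  Hyrum.run(['--no-such-flag'])
rescue SystemExit
  # Hyrum.run printed the error message and called exit.
end
```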
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: hyrum
 version: !ruby/object:Gem::Version
-  version: 0.0
+  version: 0.1.0
 platform: ruby
 authors:
 - Tracy Atteberry
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-12-
+date: 2024-12-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -46,7 +46,9 @@ executables:
 extensions: []
 extra_rdoc_files:
 - README.md
+- CHANGELOG.md
 files:
+- CHANGELOG.md
 - README.md
 - bin/console
 - bin/hyrum
@@ -85,7 +87,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 3.
+      version: 3.2.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="