great_axe 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/great_axe/ai_support/open_ai_helper.rb +102 -0
- data/lib/great_axe/ai_support/settings_generator.rb +20 -0
- data/lib/great_axe/ai_support/templates/settings.tt +3 -0
- data/lib/great_axe/version.rb +1 -1
- data/lib/great_axe.rb +37 -27
- metadata +5 -3
- data/lib/great_axe/open_ai_helper.rb +0 -66
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1b003919a105be25d6ef0dbf50a2e831ebd3ae65542b96ad95c793c4f068580e
+  data.tar.gz: a009c9fec6d4a0c0e2c7179cb9127e4c4854622c55ac7aff135961df98dc2a18
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b8caefcb6eb71b358e8fc970e66370055d15d4d191532e4c1c30cf27edb14fad3c036914fbb241643174f7009a8d9a347abf03fabcb1f6eaa8c5cdecab65d6bd
+  data.tar.gz: a99a99b0f0c0e950e32a13e0567839789e0e03df19673e92e1d50acacb3895bf120adca231a485db0090c88f2a42e3a1988708bd767aa708bc3c6f79a5dce9db
data/lib/great_axe/ai_support/open_ai_helper.rb
ADDED
@@ -0,0 +1,102 @@
+require 'openai'
+require 'yaml'
+
+module GreatAxe
+  module AiSupport
+    module OpenAiHelper
+      class << self
+        def client
+          @client ||= create_client
+        end
+
+        def create_client
+          configure_client
+          OpenAI::Client.new
+        end
+
+        def configure_client
+          OpenAI.configure do |config|
+            config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN')
+            config.organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
+          end
+        end
+
+        def input(request)
+          client.chat(
+            parameters: {
+              model: model,
+              messages: [{ role: role, content: request }],
+              temperature: temperature
+            })
+        end
+
+        def create_file(options)
+          path, request, choice = options.values_at(:path, :request, :choice)
+          File.write(path, output(request: request, choice: choice))
+        end
+
+        def output(options)
+          request, choice = options.values_at(:request, :choice)
+          choice ||= 0
+          extract_text(input(request), 'choices', choice, 'message', 'content')
+        end
+
+        def edit_file(options)
+          path, request, choice = options.values_at(:path, :request, :choice)
+          content = File.read(path)
+          response = edit(content: content, request: request)
+          File.write(path, extract_text(response, 'choices', choice, 'text'))
+        end
+
+        def edit(options)
+          content, request = options.values_at(:content, :request)
+          client.edits(
+            parameters: {
+              model: @model,
+              input: content,
+              instruction: request
+            }
+          )
+        end
+
+        def update_setting(key, value)
+          raise 'Invalid key' unless %w[model temperature role].include?(key)
+
+          load_settings[key] = value
+          overwrite_settings
+        end
+
+        private
+
+        SETTINGS_PATH = './ai_settings.yml'
+
+        def model
+          @model ||= load_settings['model']
+        end
+
+        def temperature
+          @temperature ||= load_settings['temperature']
+          @temperature.is_a?(String) ? @temperature.to_f : @temperature
+        end
+
+        def role
+          @role ||= load_settings['role']
+        end
+
+        def extract_text(response, *keys)
+          response.dig(*keys)
+        end
+
+        def load_settings
+          return pp 'Please create a ai_settings.yml file' unless File.exist?(SETTINGS_PATH)
+
+          @load_settings ||= YAML.load_file(SETTINGS_PATH)
+        end
+
+        def overwrite_settings
+          File.open(SETTINGS_PATH, 'w') { |file| YAML.dump(load_settings, file) }
+        end
+      end
+    end
+  end
+end
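For orientation, a minimal usage sketch of the new helper (not part of the released diff): it expects OPENAI_ACCESS_TOKEN in the environment and an ai_settings.yml providing the model, temperature and role keys, and returns the first chat completion. The request text and output path below are illustrative assumptions.

# Minimal sketch, assuming ai_settings.yml already exists and OPENAI_ACCESS_TOKEN is exported.
# The request strings and the tmp/reverse.rb path are placeholders, not part of the gem.
require 'great_axe'

helper = GreatAxe::AiSupport::OpenAiHelper
puts helper.output(request: 'write a ruby method that reverses a string')

# Or write the first completion straight to a file:
helper.create_file(path: 'tmp/reverse.rb', request: 'write a ruby method that reverses a string')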
data/lib/great_axe/ai_support/settings_generator.rb
ADDED
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'thor'
+
+module GreatAxe
+  module AiSupport
+    class SettingsGenerator < Thor::Group
+      include Thor::Actions
+
+      def self.source_paths
+        base_path = File.dirname(__FILE__)
+        %W[#{base_path}/templates ]
+      end
+
+      def generate_settings
+        template('settings.tt', './ai_settings.yml')
+      end
+    end
+  end
+end
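The settings.tt template contents are not shown in this diff, but the keys whitelisted by OpenAiHelper.update_setting indicate how the generated file is meant to be managed. A hedged sketch follows; the values are assumptions borrowed from the defaults in the removed helper shown at the end of this diff.

# Sketch only: render ./ai_settings.yml from the template, then adjust the whitelisted keys.
# The values below are assumptions; the actual template contents are not part of this diff.
require 'great_axe'

GreatAxe::AiSupport::SettingsGenerator.start # writes ./ai_settings.yml from settings.tt
GreatAxe::AiSupport::OpenAiHelper.update_setting('model', 'gpt-3.5-turbo')
GreatAxe::AiSupport::OpenAiHelper.update_setting('temperature', '0.7')
GreatAxe::AiSupport::OpenAiHelper.update_setting('role', 'user')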
data/lib/great_axe/version.rb
CHANGED
data/lib/great_axe.rb
CHANGED
@@ -1,10 +1,18 @@
 # frozen_string_literal: true

 require 'thor'
-require_relative 'great_axe/open_ai_helper'
+require_relative 'great_axe/ai_support/open_ai_helper'
+require_relative 'great_axe/ai_support/settings_generator'

 module GreatAxe
-  class
+  class PluginCommands < Thor
+    include GreatAxe::AiSupport
+
+    desc 'setup', 'creates a ai_settings.yml file in the root of your project'
+    def setup
+      SettingsGenerator.start
+    end
+
     desc 'make [REQUEST]', 'Uses open AI to create a file or generate output'
     option :path,
            type: :string, required: false, desc: 'The path where your file will be created', aliases: '-p'
@@ -27,39 +35,41 @@ module GreatAxe
       end
     end

-    desc 'cucumber [
-    option :
+    desc 'cucumber [PROMPT]', 'Creates feature and step files only using open ai'
+    option :path,
            type: :string,
-           required:
+           required: false, desc: 'The path where your steps will be created', aliases: '-p'

-    def cucumber(
-      feature_path = "features/#{
-
-
-
+    def cucumber(prompt)
+      feature_path = "features/#{options[:path]}.feature" if options[:path]
+      feature_prompt = "create cucumber feature file in ruby for #{prompt}"
+      make(feature_prompt, feature_path)
+      prompt_step = "create ruby cucumber step definitions in ruby for #{prompt}"
+      step_path = "features/step_definitions/#{options[:path]}_steps.rb" if options[:path]
+      make(prompt_step, step_path)
     end

-    desc 'steps [
+    desc 'steps [PROMPT]', 'Creates a new step definitions file'
     option :path,
            type: :string,
            required: false, desc: 'The path where your steps will be created', aliases: '-p'
-    option :prompt,
-           type: :string, required: false,
-           desc: 'This will create the selected steps based on your prompt using open ai', aliases: '-pr'
-    option :input,
-           type: :string,
-           required: false, desc: 'It uses a file as input to create the steps', aliases: '-i'

-    def steps(
-
-
-
-
-
-
-
-
-
+    def steps(prompt)
+      prompt_step = "create ruby cucumber step definitions in ruby for #{prompt}"
+      file_path = File.expand_path(options[:path]) if options[:path]
+      make(prompt_step, file_path)
+    end
+
+    desc 'model [MODEL]', 'Sets the model for the open ai helper'
+    def model(model)
+      OpenAiHelper.update_setting('model', model)
+      pp "Model set to #{model}"
+    end
+
+    desc 'temperature [TEMPERATURE]', 'Sets the temperature for the open ai helper'
+    def temperature(temp)
+      OpenAiHelper.update_setting('temperature', temp)
+      pp "Temperature set to #{temp}"
     end
   end
 end
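Since PluginCommands is a Thor class and the gem metadata declares bindir: exe, the executable (whose actual file name is not shown in this diff) would boot it roughly as below. The entry-point file and the sample invocations in the comments are assumptions, not taken from the released package.

#!/usr/bin/env ruby
# Hypothetical exe entry point; the executable actually shipped in exe/ is not part of this diff.
require 'great_axe'

# Example invocations (assumed binary name `great_axe`):
#   great_axe setup
#   great_axe make "a login form" -p app/views/login.html.erb
#   great_axe cucumber "user login" -p login
#   great_axe model gpt-3.5-turbo
GreatAxe::PluginCommands.start(ARGV)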
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: great_axe
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.2
 platform: ruby
 authors:
 - aguspe
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-07-
+date: 2024-07-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -51,7 +51,9 @@ files:
 - README.md
 - Rakefile
 - lib/great_axe.rb
-- lib/great_axe/open_ai_helper.rb
+- lib/great_axe/ai_support/open_ai_helper.rb
+- lib/great_axe/ai_support/settings_generator.rb
+- lib/great_axe/ai_support/templates/settings.tt
 - lib/great_axe/version.rb
 - sig/great_axe.rbs
 - sig/lib/great_axe/open_ai_helper.rbs
data/lib/great_axe/open_ai_helper.rb
REMOVED
@@ -1,66 +0,0 @@
-require 'openai'
-
-module OpenAiHelper
-  class << self
-    def client
-      @client ||= create_client
-    end
-
-    def create_client
-      configure_client
-      OpenAI::Client.new
-    end
-
-    def configure_client
-      OpenAI.configure do |config|
-        config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN')
-        config.organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
-      end
-    end
-
-    def input(request, model = 'gpt-3.5-turbo', temperature = 0.7)
-      client.chat(
-        parameters: {
-          model: model,
-          messages: [{ role: 'user', content: request }],
-          temperature: temperature
-        })
-    end
-
-    def create_file(options)
-      path, request, choice = options.values_at(:path, :request, :choice)
-      File.write(path, output(request: request, choice: choice))
-    end
-
-    def output(options)
-      request, choice = options.values_at(:request, :choice)
-      choice ||= 0
-      extract_text(input(request), 'choices', choice, 'message', 'content')
-    end
-
-    def edit_file(options)
-      path, request, choice = options.values_at(:path, :request, :choice)
-      content = File.read(path)
-      response = edit(content: content, request: request)
-      File.write(path, extract_text(response, 'choices', choice, 'text'))
-    end
-
-    def edit(options)
-      content, request, model = options.values_at(:content, :request, :model)
-      model ||= 'text-davinci-edit-001'
-      client.edits(
-        parameters: {
-          model: model,
-          input: content,
-          instruction: request
-        }
-      )
-    end
-
-    private
-
-    def extract_text(response, *keys)
-      response.dig(*keys)
-    end
-  end
-end