great_axe 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: ef52211499f7510ab55e0422a50deedeec536b704f2eb28a8c8d15180ce4e87b
-   data.tar.gz: 0b50a6271a1d86fea00964414de14c188703c795e4e692dbbc2da3701b73214c
+   metadata.gz: d270b833b6dfb6ce3a7c341830007bea1bcca75933a636ac496524573289e9c9
+   data.tar.gz: bab9c39186f5793eb424eced4f600dd898139cea69b22b68e6df01acdd798189
  SHA512:
-   metadata.gz: 885c091e4159b2f5e874ce3ecb11cd4e457330cefcf9b80d278ce698c4678328b41943172df1189d7880a91d1054b5bcc73dda34b8d9c49c3a5e6c5c7a17608b
-   data.tar.gz: e04aad5cd8d1ef2441a340e0302cb9635cc410c3f9ed07430d78d64a335427c59e86a1bab4a4b46eed2c7919f3dfafb182b408cfc9375564861116bcc165cff0
+   metadata.gz: 6e74485966e9a1493c9e19273b8fd03029d1cdb308f786606fa1a61061b4d6df64467e263beb51fcd7cc32590b009cbf5a77dee25229bfa81bc50f4a7c0a4480
+   data.tar.gz: 311f400e26707d1760e003a8bfb4c46011d0abba1f6d6224ab7132f1e35ccacfb2541873de5d242af6ecac0f08937970e715f3658e03ed84e2a869f4e9a3ae22
data/lib/great_axe/ai_support/open_ai_helper.rb ADDED
@@ -0,0 +1,110 @@
+ require 'openai'
+ require 'yaml'
+
+ module GreatAxe
+   module AiSupport
+     module OpenAiHelper
+       class << self
+         def client
+           @client ||= create_client
+         end
+
+         def create_client
+           configure_client
+           OpenAI::Client.new
+         end
+
+         def configure_client
+           OpenAI.configure do |config|
+             config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN')
+             config.organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
+           end
+         end
+
+         def input(request)
+           client.chat(
+             parameters: {
+               model: model,
+               messages: [{ role: role, content: request }],
+               temperature: temperature
+             })
+         end
+
+         def create_file(options)
+           path, request, choice = options.values_at(:path, :request, :choice)
+           File.write(path, output(request: request, choice: choice))
+         end
+
+         def output(options)
+           request, choice = options.values_at(:request, :choice)
+           choice ||= 0
+           extract_text(input(request), 'choices', choice, 'message', 'content')
+         end
+
+         def edit_file(options)
+           path, request, choice = options.values_at(:path, :request, :choice)
+           content = File.read(path)
+           response = edit(content: content, request: request)
+           File.write(path, extract_text(response, 'choices', choice, 'text'))
+         end
+
+         def edit(options)
+           content, request = options.values_at(:content, :request)
+           client.edits(
+             parameters: {
+               model: @model,
+               input: content,
+               instruction: request
+             }
+           )
+         end
+
+         def update_setting(key, value)
+           raise 'Invalid key' unless %w[model temperature role].include?(key)
+
+           load_settings[key] = value
+           overwrite_settings
+         end
+
+         def list_models
+           client.models.list
+         end
+
+         def model_info(model)
+           client.models.retrieve(id: model)
+         end
+
+         private
+
+         SETTINGS_PATH = './ai_settings.yml'
+
+         def model
+           @model ||= load_settings['model']
+         end
+
+         def temperature
+           @temperature ||= load_settings['temperature']
+           @temperature.is_a?(String) ? @temperature.to_f : @temperature
+         end
+
+         def role
+           @role ||= load_settings['role']
+         end
+
+         def extract_text(response, *keys)
+           response.dig(*keys)
+         end
+
+         def load_settings
+           return pp 'Please create a ai_settings.yml file' unless File.exist?(SETTINGS_PATH)
+
+           @load_settings ||= YAML.load_file(SETTINGS_PATH)
+         end
+
+         def overwrite_settings
+           File.open(SETTINGS_PATH, 'w') { |file| YAML.dump(load_settings, file) }
+         end
+       end
+     end
+   end
+ end
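
The helper above now pulls model, temperature and role from ./ai_settings.yml instead of taking them as method arguments. A minimal usage sketch, not from the gem's docs: it assumes the gem is on the load path, OPENAI_ACCESS_TOKEN is exported, and ai_settings.yml exists in the working directory.

# Illustrative sketch only; the paths, prompts and the gpt-4o-mini value are examples.
require 'great_axe/ai_support/open_ai_helper'

helper = GreatAxe::AiSupport::OpenAiHelper

# Chat completion whose model/temperature/role come from ai_settings.yml.
puts helper.output(request: 'Write a one-line Ruby hello world')

# Write the first completion choice straight to a file.
helper.create_file(path: 'hello.rb', request: 'Write a Ruby hello world script')

# Persist a different model back into ai_settings.yml.
helper.update_setting('model', 'gpt-4o-mini')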
data/lib/great_axe/ai_support/settings_generator.rb ADDED
@@ -0,0 +1,20 @@
+ # frozen_string_literal: true
+
+ require 'thor'
+
+ module GreatAxe
+   module AiSupport
+     class SettingsGenerator < Thor::Group
+       include Thor::Actions
+
+       def self.source_paths
+         base_path = File.dirname(__FILE__)
+         %W[#{base_path}/templates ]
+       end
+
+       def generate_settings
+         template('settings.tt', './ai_settings.yml')
+       end
+     end
+   end
+ end
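
The generator above copies the bundled settings.tt template to ./ai_settings.yml through Thor::Actions; the `setup` command added to the CLI (see the great_axe.rb diff below) simply calls it. A hedged sketch of invoking it directly:

# Illustrative only: running the generator outside the Thor CLI.
require 'great_axe/ai_support/settings_generator'

# Writes ./ai_settings.yml from templates/settings.tt into the current directory;
# Thor::Actions asks before overwriting an existing file.
GreatAxe::AiSupport::SettingsGenerator.start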
data/lib/great_axe/ai_support/templates/settings.tt ADDED
@@ -0,0 +1,3 @@
+ model: gpt-4o
+ temperature: 0.7
+ role: user
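
Because the template contains no ERB tags, the generated ai_settings.yml is exactly the YAML above, and OpenAiHelper reads it with YAML.load_file. A small sketch of what that load returns for the freshly generated file:

# Illustrative only, assuming ./ai_settings.yml was just generated by `setup`.
require 'yaml'

settings = YAML.load_file('./ai_settings.yml')
settings['model']       # => "gpt-4o"
settings['temperature'] # => 0.7 (already a Float; the helper's to_f coercion only
                        #    matters once the CLI rewrites it as a String, e.g. "0.5")
settings['role']        # => "user"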
data/lib/great_axe/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module GreatAxe
-   VERSION = '0.1.1'
+   VERSION = '0.1.3'
  end
data/lib/great_axe.rb CHANGED
@@ -1,10 +1,19 @@
  # frozen_string_literal: true

  require 'thor'
- require_relative 'great_axe/open_ai_helper'
+ require_relative 'great_axe/ai_support/open_ai_helper'
+ require_relative 'great_axe/ai_support/settings_generator'

  module GreatAxe
    class PluginCommands < Thor
+     include GreatAxe::AiSupport
+
+     desc 'setup', 'creates a ai_settings.yml file in the root of your project'
+
+     def setup
+       SettingsGenerator.start
+     end
+
      desc 'make [REQUEST]', 'Uses open AI to create a file or generate output'
      option :path,
             type: :string, required: false, desc: 'The path where your file will be created', aliases: '-p'
@@ -27,39 +36,55 @@ module GreatAxe
        end
      end

-     desc 'cucumber [NAME]', 'Creates feature and step files only using open ai'
-     option :prompt,
+     desc 'cucumber [PROMPT]', 'Creates feature and step files only using open ai'
+     option :path,
             type: :string,
-            required: true, desc: 'The prompt for open ai', aliases: '-p'
+            required: false, desc: 'The path where your steps will be created', aliases: '-p'

-     def cucumber(name)
-       feature_path = "features/#{name}.feature"
-       make(options[:prompt], feature_path)
-       prompt_step = "create cucumber steps for the following scenarios in ruby #{File.read(feature_path)}"
-       make(prompt_step, "features/step_definitions/#{name}_steps.rb")
+     def cucumber(prompt)
+       feature_path = "features/#{options[:path]}.feature" if options[:path]
+       feature_prompt = "create cucumber feature file in ruby for #{prompt}"
+       make(feature_prompt, feature_path)
+       prompt_step = "create ruby cucumber step definitions in ruby for #{prompt}"
+       step_path = "features/step_definitions/#{options[:path]}_steps.rb" if options[:path]
+       make(prompt_step, step_path)
      end

-     desc 'steps [NAME]', 'Creates a new step definitions file'
+     desc 'steps [PROMPT]', 'Creates a new step definitions file'
      option :path,
             type: :string,
             required: false, desc: 'The path where your steps will be created', aliases: '-p'
-     option :prompt,
-            type: :string, required: false,
-            desc: 'This will create the selected steps based on your prompt using open ai', aliases: '-pr'
-     option :input,
-            type: :string,
-            required: false, desc: 'It uses a file as input to create the steps', aliases: '-i'
-
-     def steps(name)
-       path = 'features/step_definitions'
-       input = options[:input]
-       if input
-         prompt = options[:prompt] || 'create cucumber steps for the following scenarios in ruby'
-         content = "#{prompt} #{File.read(input)}"
-         make(content, "#{path}/#{name}_steps.rb")
-       else
-         make(options[:open_ai], "#{path}/#{name}_steps.rb")
-       end
+
+     def steps(prompt)
+       prompt_step = "create ruby cucumber step definitions in ruby for #{prompt}"
+       file_path = File.expand_path(options[:path]) if options[:path]
+       make(prompt_step, file_path)
+     end
+
+     desc 'model [MODEL]', 'Sets the model for the open ai helper'
+
+     def model(model)
+       OpenAiHelper.update_setting('model', model)
+       pp "Model set to #{model}"
+     end
+
+     desc 'temperature [TEMPERATURE]', 'Sets the temperature for the open ai helper'
+
+     def temperature(temp)
+       OpenAiHelper.update_setting('temperature', temp)
+       pp "Temperature set to #{temp}"
+     end
+
+     desc 'list_models', 'Lists the available open ai models'
+
+     def list_models
+       pp OpenAiHelper.list_models
+     end
+
+     desc 'model_info [MODEL]', 'Gets information on a specific model'
+
+     def model_info(model)
+       pp OpenAiHelper.model_info(model)
      end
    end
  end
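
Taken together, the 0.1.3 Thor class adds setup, model, temperature, list_models and model_info commands next to the reworked cucumber and steps commands. The gem's executable name is not shown in this diff, so the hedged sketch below drives the commands programmatically through Thor instead of the CLI binary:

# Illustrative only: the argument arrays mirror what the (unnamed here) CLI binary would pass.
require 'great_axe'

GreatAxe::PluginCommands.start(%w[setup])            # generate ./ai_settings.yml
GreatAxe::PluginCommands.start(%w[model gpt-4o])     # persist the model setting
GreatAxe::PluginCommands.start(%w[temperature 0.5])  # stored as the String "0.5", coerced with to_f on read
GreatAxe::PluginCommands.start(%w[list_models])      # pretty-prints the available models
GreatAxe::PluginCommands.start(%w[cucumber login -p login])  # features/login.feature plus matching steps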
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: great_axe
  version: !ruby/object:Gem::Version
-   version: 0.1.1
+   version: 0.1.3
  platform: ruby
  authors:
  - aguspe
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-07-28 00:00:00.000000000 Z
+ date: 2024-07-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: ruby-openai
@@ -51,7 +51,9 @@ files:
  - README.md
  - Rakefile
  - lib/great_axe.rb
- - lib/great_axe/open_ai_helper.rb
+ - lib/great_axe/ai_support/open_ai_helper.rb
+ - lib/great_axe/ai_support/settings_generator.rb
+ - lib/great_axe/ai_support/templates/settings.tt
  - lib/great_axe/version.rb
  - sig/great_axe.rbs
  - sig/lib/great_axe/open_ai_helper.rbs
data/lib/great_axe/open_ai_helper.rb DELETED
@@ -1,66 +0,0 @@
- require 'openai'
-
- module OpenAiHelper
-   class << self
-     def client
-       @client ||= create_client
-     end
-
-     def create_client
-       configure_client
-       OpenAI::Client.new
-     end
-
-     def configure_client
-       OpenAI.configure do |config|
-         config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN')
-         config.organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
-       end
-     end
-
-     def input(request, model = 'gpt-3.5-turbo', temperature = 0.7)
-       client.chat(
-         parameters: {
-           model: model,
-           messages: [{ role: 'user', content: request }],
-           temperature: temperature
-         })
-     end
-
-     def create_file(options)
-       path, request, choice = options.values_at(:path, :request, :choice)
-       File.write(path, output(request: request, choice: choice))
-     end
-
-     def output(options)
-       request, choice = options.values_at(:request, :choice)
-       choice ||= 0
-       extract_text(input(request), 'choices', choice, 'message', 'content')
-     end
-
-     def edit_file(options)
-       path, request, choice = options.values_at(:path, :request, :choice)
-       content = File.read(path)
-       response = edit(content: content, request: request)
-       File.write(path, extract_text(response, 'choices', choice, 'text'))
-     end
-
-     def edit(options)
-       content, request, model = options.values_at(:content, :request, :model)
-       model ||= 'text-davinci-edit-001'
-       client.edits(
-         parameters: {
-           model: model,
-           input: content,
-           instruction: request
-         }
-       )
-     end
-
-     private
-
-     def extract_text(response, *keys)
-       response.dig(*keys)
-     end
-   end
- end