ghostest 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.idea/misc.xml +4 -0
- data/.idea/modules.xml +8 -0
- data/.idea/vcs.xml +6 -0
- data/.rubocop.yml +236 -0
- data/.ruby-version +1 -0
- data/README.md +31 -0
- data/Rakefile +4 -0
- data/exe/ghostest +74 -0
- data/ghostest.gemspec +48 -0
- data/lib/ghostest/attr_reader.rb +11 -0
- data/lib/ghostest/config/agent.rb +30 -0
- data/lib/ghostest/config.rb +65 -0
- data/lib/ghostest/config_error.rb +5 -0
- data/lib/ghostest/error.rb +5 -0
- data/lib/ghostest/languages/ruby.rb +21 -0
- data/lib/ghostest/logger.rb +32 -0
- data/lib/ghostest/manager.rb +72 -0
- data/lib/ghostest/test_condition.rb +24 -0
- data/lib/ghostest/version.rb +3 -0
- data/lib/ghostest.rb +58 -0
- data/lib/google_custom_search.rb +30 -0
- data/lib/i18n_translator.rb +66 -0
- data/lib/initializers/i18n.rb +9 -0
- data/lib/llm/agents/base.rb +31 -0
- data/lib/llm/agents/reviewer.rb +50 -0
- data/lib/llm/agents/test_designer.rb +43 -0
- data/lib/llm/agents/test_programmer.rb +45 -0
- data/lib/llm/clients/azure_open_ai.rb +15 -0
- data/lib/llm/clients/base.rb +88 -0
- data/lib/llm/clients/open_ai.rb +14 -0
- data/lib/llm/functions/add_to_memory.rb +41 -0
- data/lib/llm/functions/base.rb +13 -0
- data/lib/llm/functions/exec_rspec_test.rb +39 -0
- data/lib/llm/functions/fix_one_rspec_test.rb +55 -0
- data/lib/llm/functions/get_files_list.rb +29 -0
- data/lib/llm/functions/get_gem_files_list.rb +43 -0
- data/lib/llm/functions/make_new_file.rb +43 -0
- data/lib/llm/functions/overwrite_file.rb +42 -0
- data/lib/llm/functions/read_file.rb +43 -0
- data/lib/llm/functions/record_lgtm.rb +48 -0
- data/lib/llm/functions/report_bug.rb +34 -0
- data/lib/llm/functions/switch_assignee.rb +74 -0
- data/lib/llm/message_container.rb +63 -0
- data/sig/ghostest.rbs +4 -0
- metadata +245 -0
data/lib/ghostest/test_condition.rb
ADDED
@@ -0,0 +1,24 @@
+module Ghostest
+  class TestCondition
+    def initialize(language_klass)
+      @language_klass = language_klass
+      unless File.exist?(@language_klass.test_condition_yml_path)
+        FileUtils.mkdir_p(File.dirname(@language_klass.test_condition_yml_path))
+        File.write(@language_klass.test_condition_yml_path, YAML.dump({}))
+      end
+      @test_condition = YAML.load(File.read(@language_klass.test_condition_yml_path)) || {}
+    end
+
+    def save_as_updated!(source_path)
+      source_md5 = Digest::MD5.hexdigest(File.read(source_path))
+      @test_condition[source_path] = { source_md5: }
+
+      File.write(@language_klass.test_condition_yml_path, YAML.dump(@test_condition))
+    end
+
+    def should_update_test?(source_path)
+      source_md5 = Digest::MD5.hexdigest(File.read(source_path))
+      @test_condition[source_path].nil? || @test_condition[source_path][:source_md5] != source_md5
+    end
+  end
+end
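Editor's note: the following is an illustrative usage sketch, not part of the package. The `language` object is a stand-in for whatever Ghostest passes in; all TestCondition needs from it is a `test_condition_yml_path` method.

# Illustrative sketch only; assumes ghostest has already been required.
language  = Struct.new(:test_condition_yml_path).new("tmp/ghostest_test_condition.yml")
condition = Ghostest::TestCondition.new(language)

source = "lib/foo.rb"
if condition.should_update_test?(source)  # true when the file's MD5 is new or changed
  # ... regenerate the spec for lib/foo.rb ...
  condition.save_as_updated!(source)      # records the current MD5 in the YAML file
end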
data/lib/ghostest.rb
ADDED
@@ -0,0 +1,58 @@
+# don't need to test this file
+require_relative "ghostest/version"
+
+require 'digest'
+
+require 'openai'
+require 'html2markdown'
+require 'addressable'
+require 'baran'
+require 'tiktoken_ruby'
+require 'google-apis-customsearch_v1'
+require 'colorize'
+require 'pry'
+require 'i18n'
+unless defined?(HashWithIndifferentAccess)
+  require 'indifference'
+end
+require 'erb'
+require 'bundler'
+
+require "ghostest/attr_reader"
+require "ghostest/config/agent"
+require "ghostest/config"
+require "ghostest/config_error"
+require "ghostest/error"
+require "ghostest/languages/ruby"
+require "ghostest/logger"
+require "ghostest/manager"
+require "ghostest/test_condition"
+require "ghostest/version"
+require "ghostest"
+require "google_custom_search"
+require "i18n_translator"
+require "initializers/i18n"
+
+require "llm/message_container"
+require "llm/agents/base"
+require "llm/agents/reviewer"
+require "llm/agents/test_designer"
+require "llm/agents/test_programmer"
+require "llm/clients/base"
+require "llm/clients/azure_open_ai"
+require "llm/clients/open_ai"
+require "llm/functions/base"
+require "llm/functions/add_to_memory"
+require "llm/functions/exec_rspec_test"
+require "llm/functions/fix_one_rspec_test"
+require "llm/functions/get_files_list"
+require "llm/functions/get_gem_files_list"
+require "llm/functions/make_new_file"
+require "llm/functions/overwrite_file"
+require "llm/functions/read_file"
+require "llm/functions/record_lgtm"
+require "llm/functions/report_bug"
+require "llm/functions/switch_assignee"
+
+module Ghostest
+end
data/lib/google_custom_search.rb
ADDED
@@ -0,0 +1,30 @@
+class GoogleCustomSearch
+  def search(query, args = {})
+    service.list_cses(cx: ENV['GOOGLE_CUSTOM_SEARCH_CSE_ID'], q: query, **args)
+  end
+
+  def service
+    @service ||= begin
+      service = Google::Apis::CustomsearchV1::CustomSearchAPIService.new
+      authorizer = make_authorizer
+      authorizer.fetch_access_token!
+      service.authorization = authorizer
+      service
+    end
+  end
+
+  def make_authorizer
+    sa_key = ENV.fetch("GOOGLE_SA_PRIVATE_KEY")
+    key = ::OpenSSL::PKey::RSA.new(sa_key)
+    cred = ::Signet::OAuth2::Client.new(
+      token_credential_uri: "https://oauth2.googleapis.com/token",
+      audience: "https://oauth2.googleapis.com/token",
+      scope: %w[
+        https://www.googleapis.com/auth/cse
+      ],
+      issuer: ENV.fetch("GOOGLE_SA_CLIENT_EMAIL"),
+      signing_key: key
+    )
+    cred.configure_connection({})
+  end
+end
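Editor's note: a rough usage sketch, not part of the package. The three environment variable names are the ones the class above reads; the values here are placeholders.

# Illustrative sketch only.
ENV["GOOGLE_CUSTOM_SEARCH_CSE_ID"] = "your-cse-id"
ENV["GOOGLE_SA_CLIENT_EMAIL"]      = "service-account@example.iam.gserviceaccount.com"
ENV["GOOGLE_SA_PRIVATE_KEY"]       = File.read("service_account_key.pem")

results = GoogleCustomSearch.new.search("ruby rspec best practices")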
data/lib/i18n_translator.rb
ADDED
@@ -0,0 +1,66 @@
+require 'digest'
+class I18nTranslator
+  def self.update_dictionary!(from_locale, to_locales)
+    from_file_paths = I18n.load_path.select { |path| path.match(/#{from_locale}\.yml$/) }
+    from_file_paths.each do |from_file_path|
+      to_locales.each do |to_locale|
+        from_hash = {
+          to_locale.to_s => YAML.load(File.read(from_file_path))[from_locale.to_s],
+        }
+        to_file_path = from_file_path.gsub(/#{from_locale}\.yml$/, "#{to_locale}.yml")
+        to_hash = File.exist?(to_file_path) ? YAML.load(File.read(to_file_path)) : {}
+        to_hash = deep_translate(from_hash, to_hash, from_locale, to_locale)
+        File.write(to_file_path, YAML.dump(to_hash))
+      end
+    end
+  end
+
+  def self.deep_translate(from_hash, to_hash, from_locale, to_locale)
+    ret = {}
+    from_hash.each do |key, val|
+      if val.is_a?(Hash)
+        h = deep_translate(val, to_hash[key] || {}, from_locale, to_locale)
+        ret[key] = Hash[h.sort]
+      else
+        md5_key = "#{key}_md5"
+        md5 = ::Digest::MD5.hexdigest(val)
+        if to_hash[md5_key].nil? || to_hash[md5_key] != md5
+          ret[md5_key] = md5
+          ret[key] = translate(val)
+        else
+          ret[md5_key] = to_hash[md5_key]
+          ret[key] = to_hash[key]
+        end
+      end
+    end
+    ret
+  end
+
+  def self.translator
+    @translator ||= ::Llm::Clients::AzureOpenAi.new
+  end
+
+  # don't need to test this method
+  def self.translate(str)
+    # DeepL would be preferable, but an LLM is used as a substitute
+    ret = self.translator.chat(parameters: {
+      messages: [
+        {
+          role: "system",
+          content: "You are an excellent translator. We translate strings sent by users into accurate English.\n" + \
+            "We do not output any content other than the translation.\n" + \
+            "Please keep the position and number of the new line code(\\n).\n" + \
+            "Never omit the line feed code at the end of a sentence.",
+        },
+        {
+          role: "user",
+          content: str,
+        },
+      ],
+    })
+    translated = ret.dig("choices", 0, "message", "content")
+    puts("#{str} => #{translated.green}")
+    sleep(1)
+    translated
+  end
+end
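Editor's note: a minimal usage sketch, not part of the package. The locale pair is illustrative; the class simply reads the source-locale YAML files on I18n.load_path and writes translated siblings, re-translating only entries whose MD5 changed.

# Illustrative sketch only, assuming Japanese source locale files.
I18n.load_path += Dir[File.expand_path("config/locales/**/*.yml")]
I18nTranslator.update_dictionary!(:ja, [:en])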
data/lib/llm/agents/base.rb
ADDED
@@ -0,0 +1,31 @@
+module Llm
+  module Agents
+    class Base
+      include AttrReader
+      attr_reader :name, :config, :agent_config
+
+      def initialize(name, config, logger)
+        @name = name
+        @config = config
+        @agent_config = config.agents[name]
+        @logger = logger
+      end
+
+      def name_with_type
+        "#{self.name}(#{self.agent_config.role})"
+      end
+
+      def say(message, color: nil)
+        if color.nil?
+          @logger.info("#{self.name}: #{message}".send(self.agent_config.color))
+        else
+          if color
+            @logger.info("#{self.name}: #{message}".send(color))
+          else
+            @logger.info("#{self.name}: #{message}")
+          end
+        end
+      end
+    end
+  end
+end
data/lib/llm/agents/reviewer.rb
ADDED
@@ -0,0 +1,50 @@
+module Llm
+  module Agents
+    class Reviewer < Base
+      def initialize(name, config, logger)
+        super(name, config, logger)
+        @record_lgtm_function = Llm::Functions::RecordLgtm.new
+      end
+
+      def lgtm?
+        @record_lgtm_function.lgtm?
+      end
+
+      # skip test for this method
+      def work(
+        source_path: raise("source_path is required"),
+        test_path: nil,
+        switch_assignee_function: raise("switch_assignee_function is required"))
+        say("start to work for #{source_path}")
+
+        message_container = Llm::MessageContainer.new
+        test_path ||= self.config.language_klass.convert_source_path_to_test_path(source_path)
+        message_container.add_system_message(self.agent_config.system_prompt.gsub("%{source_path}", source_path).gsub("%{test_path}", test_path))
+
+        if switch_assignee_function.messages.size > 0
+          message_container.add_system_message(I18n.t('ghostest.agents.reviewer.last_assignee_comment',
+            last_assignee: switch_assignee_function.last_assignee,
+            comment: switch_assignee_function.last_message))
+        end
+
+        azure_open_ai = Llm::Clients::AzureOpenAi.new
+        io = azure_open_ai.chat_with_function_calling_loop(
+          messages: message_container,
+          functions: [
+            @record_lgtm_function,
+            Llm::Functions::GetFilesList.new,
+            Llm::Functions::ReadFile.new,
+            Llm::Functions::AddToMemory.new(message_container),
+            switch_assignee_function,
+
+          ] + self.config.language_klass.create_functions,
+          agent: self,
+        )
+
+        comment = io.rewind && io.read
+        say(comment)
+        comment
+      end
+    end
+  end
+end
data/lib/llm/agents/test_designer.rb
ADDED
@@ -0,0 +1,43 @@
+module Llm
+  module Agents
+    class TestDesigner < Base
+
+      # skip test for this method
+      def work(
+        source_path: raise("source_path is required"),
+        test_path: nil,
+        switch_assignee_function: raise("switch_assignee_function is required"))
+
+        say("start to work for #{source_path}")
+
+        message_container = Llm::MessageContainer.new
+        test_path ||= self.config.language_klass.convert_source_path_to_test_path(source_path)
+        message_container.add_system_message(self.agent_config.system_prompt.gsub("%{source_path}", source_path).gsub("%{test_path}", test_path))
+
+        if switch_assignee_function.messages.size > 0
+          message_container.add_system_message(I18n.t('ghostest.agents.test_designer.last_assignee_comment',
+            last_assignee: switch_assignee_function.last_assignee,
+            comment: switch_assignee_function.last_message))
+        end
+
+        azure_open_ai = Llm::Clients::AzureOpenAi.new
+        io = azure_open_ai.chat_with_function_calling_loop(
+          messages: message_container,
+          functions: [
+            Llm::Functions::GetFilesList.new,
+            Llm::Functions::ReadFile.new,
+            Llm::Functions::AddToMemory.new(message_container),
+            Llm::Functions::ReportBug.new,
+            switch_assignee_function,
+
+          ] + self.config.language_klass.create_functions,
+          agent: self,
+        )
+
+        comment = io.rewind && io.read
+        say(comment)
+        comment
+      end
+    end
+  end
+end
data/lib/llm/agents/test_programmer.rb
ADDED
@@ -0,0 +1,45 @@
+module Llm
+  module Agents
+    class TestProgrammer < Base
+
+      # skip test for this method
+      def work(
+        source_path: raise("source_path is required"),
+        test_path: nil,
+        switch_assignee_function: raise("switch_assignee_function is required"))
+
+        say("start to work for #{source_path}")
+
+        message_container = Llm::MessageContainer.new
+        test_path ||= self.config.language_klass.convert_source_path_to_test_path(source_path)
+        message_container.add_system_message(self.agent_config.system_prompt.gsub("%{source_path}", source_path).gsub("%{test_path}", test_path))
+
+        if switch_assignee_function.messages.size > 0
+          message_container.add_system_message(I18n.t('ghostest.agents.test_programmer.last_assignee_comment',
+            last_assignee: switch_assignee_function.last_assignee,
+            comment: switch_assignee_function.last_message))
+        end
+
+        azure_open_ai = Llm::Clients::AzureOpenAi.new
+        io = azure_open_ai.chat_with_function_calling_loop(
+          messages: message_container,
+          functions: [
+            Llm::Functions::GetFilesList.new,
+            Llm::Functions::ReadFile.new,
+            Llm::Functions::OverwriteFile.new,
+            Llm::Functions::MakeNewFile.new,
+            Llm::Functions::AddToMemory.new(message_container),
+            Llm::Functions::ReportBug.new,
+            switch_assignee_function,
+
+          ] + self.config.language_klass.create_functions,
+          agent: self,
+        )
+
+        comment = io.rewind && io.read
+        say(comment)
+        comment
+      end
+    end
+  end
+end
data/lib/llm/clients/azure_open_ai.rb
ADDED
@@ -0,0 +1,15 @@
+module Llm
+  module Clients
+    class AzureOpenAi < Llm::Clients::Base
+      def initialize(timeout: 30000)
+        @client = OpenAI::Client.new(
+          api_type: :azure,
+          api_version: ENV.fetch("AZURE_API_VERSION"),
+          access_token: ENV.fetch("AZURE_OPENAI_API_KEY"),
+          uri_base: "#{ENV.fetch("AZURE_API_BASE")}/openai/deployments/#{ENV.fetch("AZURE_DEPLOYMENT_NAME")}",
+          request_timeout: timeout,
+        )
+      end
+    end
+  end
+end
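Editor's note: an illustrative sketch of the environment this client expects, not part of the package. Only the variable names come from the code above; the values are placeholders.

# Illustrative sketch only.
ENV["AZURE_API_VERSION"]     = "2023-07-01-preview"  # example value, not from the gem
ENV["AZURE_OPENAI_API_KEY"]  = "your-azure-openai-key"
ENV["AZURE_API_BASE"]        = "https://your-resource.openai.azure.com"
ENV["AZURE_DEPLOYMENT_NAME"] = "your-deployment"

client   = Llm::Clients::AzureOpenAi.new
response = client.chat(parameters: { messages: [{ role: "user", content: "ping" }] })
puts response.dig("choices", 0, "message", "content")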
data/lib/llm/clients/base.rb
ADDED
@@ -0,0 +1,88 @@
+require "ghostest/attr_reader"
+module Llm
+  module Clients
+    class Base
+      include AttrReader
+      attr_reader :client
+
+      def chat(parameters: {})
+        parameters = parameters.with_indifferent_access
+        if parameters[:messages].nil? || parameters[:messages].empty?
+          raise 'messages is required.'
+        end
+        parameters[:temperature] ||= 0.5
+        parameters[:top_p] ||= 1
+        parameters[:frequency_penalty] ||= 0
+        parameters[:presence_penalty] ||= 0
+
+        self.client.chat(parameters:)
+      end
+
+      def chat_with_function_calling_loop(**args)
+        agent = args.delete(:agent) || (raise 'agent is required.')
+        chat_message_io = StringIO.new
+
+        if args[:messages].is_a?(Llm::MessageContainer)
+          message_container = args[:messages]
+        else
+          message_container = Llm::MessageContainer.new
+          message_container.add_raw_messages(args[:messages])
+        end
+
+        i = 0
+        while (i += 1) < 20
+          ret = self.chat(parameters: args.merge({
+            messages: message_container.to_capped_messages,
+            functions: args[:functions].map { |f| f.definition },
+          }))
+          if ret.dig("choices", 0, "finish_reason") != 'function_call'
+            break
+          end
+
+          # Function calling
+          message = ret.dig("choices", 0, "message")
+          function = args[:functions].detect { |f| f.function_name == message['function_call']['name'].to_sym }
+          message_container.add_raw_message(message.merge({ content: nil }))
+
+          function_args = (JSON.parse(message.dig('function_call', 'arguments')) || {}).with_indifferent_access
+          agent.say(function.function_name)
+          agent.say(function_args, color: false)
+
+          function_result = function.execute_and_generate_message(function_args)
+          message_container.add_raw_message({
+            role: "function",
+            name: function.function_name,
+            content: JSON.dump(function_result),
+          })
+
+          if function.stop_llm_call?
+            chat_message_io.write(function_result)
+            return chat_message_io
+          end
+        end
+
+        # Display the message
+        if content = ret.dig("choices", 0, "message", "content")
+          chat_message_io.write(content)
+        else
+          # If an error occurred or function calling ran too many times, consult another agent
+          functions = args[:functions].select do |f| %w[
+            switch_assignee
+            report_bug
+          ].include?(f.function_name.to_s) end
+          ret = self.chat(parameters: args.merge({
+            messages: message_container.to_capped_messages,
+            functions: functions.map { |f| f.definition },
+          }))
+          if content = ret.dig("choices", 0, "message", "content")
+            chat_message_io.write(content)
+          else
+            puts ret
+            exit 1
+          end
+        end
+        chat_message_io
+      end
+    end
+  end
+end
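Editor's note: a minimal sketch of the contract the loop above relies on. Each function object answers function_name, definition, execute_and_generate_message(args), and stop_llm_call?. The class below is hypothetical and not part of the gem; the real functions build their descriptions from I18n keys, and the stop_llm_call? default presumably lives in Llm::Functions::Base, which is not shown in this section.

# Hypothetical function object, for illustration only.
module Llm
  module Functions
    class Echo < Base
      def function_name
        :echo
      end

      def definition
        {
          name: function_name,
          description: "Echo the given text back to the model",
          parameters: {
            type: :object,
            properties: { text: { type: :string, description: "Text to echo" } },
            required: [:text],
          },
        }
      end

      # Return value is serialized with JSON.dump and appended as a "function" message.
      def execute_and_generate_message(args)
        { echoed: args[:text] }
      end

      # When true, the loop writes the result to the IO and stops calling the LLM.
      def stop_llm_call?
        false
      end
    end
  end
end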
data/lib/llm/clients/open_ai.rb
ADDED
@@ -0,0 +1,14 @@
+module Llm
+  module Clients
+    class OpenAi < Llm::Clients::Base
+      def initialize(timeout: 300)
+        @client = OpenAI::Client.new(
+          api_version: ENV.fetch("OPENAI_API_VERSION"),
+          access_token: ENV.fetch("OPENAI_API_KEY"),
+          uri_base: "https://openai.com/openai/deployments/chat/completions",
+          request_timeout: timeout,
+        )
+      end
+    end
+  end
+end
data/lib/llm/functions/add_to_memory.rb
ADDED
@@ -0,0 +1,41 @@
+module Llm
+  module Functions
+    class AddToMemory < Base
+      def function_name
+        :add_to_memory
+      end
+
+      def initialize(message_container)
+        @message_container = message_container
+      end
+
+      def definition
+        return @definition unless @definition.nil?
+
+        @definition = {
+          name: self.function_name,
+          description: I18n.t("ghostest.functions.#{self.function_name}.description"),
+          parameters: {
+            type: :object,
+            properties: {
+              contents_to_memory: {
+                type: :string,
+                description: I18n.t("ghostest.functions.#{self.function_name}.parameters.contents_to_memory"),
+              },
+            },
+            required: [:contents_to_memory],
+          },
+        }
+        @definition
+      end
+
+      def execute_and_generate_message(args)
+        if args[:contents_to_memory].nil? || args[:contents_to_memory].empty?
+          raise "contents_to_memory is required"
+        end
+        @message_container.add_system_message(I18n.t("ghostest.functions.#{self.function_name}.system_message_prefix", contents_to_memory: args[:contents_to_memory]))
+        { result: 'success' }
+      end
+    end
+  end
+end
data/lib/llm/functions/exec_rspec_test.rb
ADDED
@@ -0,0 +1,39 @@
+module Llm
+  module Functions
+    class ExecRspecTest < Base
+      def function_name
+        :exec_rspec_test
+      end
+
+      def definition
+        return @definition unless @definition.nil?
+
+        @definition = {
+          name: self.function_name,
+          description: I18n.t("ghostest.functions.#{self.function_name}.description"),
+          parameters: {
+            type: :object,
+            properties: {
+              file_or_dir_path: {
+                type: :string,
+                description: I18n.t("ghostest.functions.#{self.function_name}.parameters.file_or_dir_path"),
+              },
+            },
+            required: [:file_or_dir_path],
+          },
+        }
+        @definition
+      end
+
+      def execute_and_generate_message(args)
+        if args[:file_or_dir_path].nil? || args[:file_or_dir_path].empty?
+          raise Ghostest::Error.new("Please specify the file or directory path.")
+        end
+        script = "bundle exec rspec '#{args['file_or_dir_path']}'"
+        stdout, stderr, status = Open3.capture3(script)
+
+        { stdout:, stderr:, exit_status: status.exitstatus }
+      end
+    end
+  end
+end
data/lib/llm/functions/fix_one_rspec_test.rb
ADDED
@@ -0,0 +1,55 @@
+module Llm
+  module Functions
+    class FixOneRspecTest < Base
+      def function_name
+        :fix_one_rspec_test
+      end
+
+      def definition
+        return @definition unless @definition.nil?
+
+        @definition = {
+          name: self.function_name,
+          description: I18n.t("ghostest.functions.#{self.function_name}.description"),
+          parameters: {
+            type: :object,
+            properties: {
+              file_path: {
+                type: :string,
+                description: I18n.t("ghostest.functions.#{self.function_name}.parameters.file_path"),
+              },
+              line_num: {
+                type: :string,
+                description: I18n.t("ghostest.functions.#{self.function_name}.parameters.line_num"),
+              },
+            },
+            required: [:file_path, :line_num],
+          },
+        }
+        @definition
+      end
+
+      def execute_and_generate_message(args)
+        if args[:file_path].nil? || args[:file_path].empty? || !File.exist?(args[:file_path])
+          raise Ghostest::Error.new("Please specify the file path.")
+        end
+        line_num = args[:line_num].to_i
+        if line_num < 1
+          raise Ghostest::Error.new("Please specify the line num. #{args[:line_num]}")
+        end
+
+        n = 0
+        while n < 5
+          n += 1
+          script = "bundle exec rspec '#{args['file_path']}:#{args['line_num']}'"
+          stdout, stderr, status = Open3.capture3(script)
+          if status.exitstatus != 0
+
+          end
+        end
+
+        { stdout:, stderr:, exit_status: status.exitstatus }
+      end
+    end
+  end
+end
data/lib/llm/functions/get_files_list.rb
ADDED
@@ -0,0 +1,29 @@
+module Llm
+  module Functions
+    class GetFilesList < Base
+      def function_name
+        :get_files_list
+      end
+
+      def definition
+        return @definition unless @definition.nil?
+
+        @definition = {
+          name: self.function_name,
+          description: I18n.t("ghostest.functions.#{self.function_name}.description"),
+          parameters: {
+            type: :object,
+            properties: {},
+          },
+        }
+        @definition
+      end
+
+      def execute_and_generate_message(args)
+        files_list = Dir.glob("**/*.{rb,yml}")
+
+        { files_list: }
+      end
+    end
+  end
+end