active_cortex 0.1.0 → 0.2.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +41 -1
- data/lib/active_cortex/generator/has_many.rb +90 -0
- data/lib/active_cortex/generator/text.rb +34 -0
- data/lib/active_cortex/generator.rb +59 -0
- data/lib/active_cortex/model.rb +49 -27
- data/lib/active_cortex/version.rb +1 -1
- data/lib/active_cortex.rb +3 -0
- metadata +12 -9
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: ca05346fe4261874164f6fbf48a473b4e310f2cafa206721b2f0ddb2dcfdff5c
|
4
|
+
data.tar.gz: 2a0163d3e56f87d416a56a105ad3c29af488de955445180d0adafa347d7d1c99
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 5f4b74059e5887c179f7c330dd1a47aa87e68521b3e63a1283359ae4067cac187097890a52bd4b931b5b0cbcb6bdbecae16e985a9c16a708ecc3b78b77be6554
|
7
|
+
data.tar.gz: c585a12f113acf4086389fe67f20a1558e054c9f1ad3aa6e87ffb22de42f31e385ce9c883b2041a3a619bfdb95feab2d4ada8f54e0ea6b43f53713742d3d24f8
|
data/README.md
CHANGED
@@ -2,6 +2,35 @@
|
|
2
2
|
|
3
3
|
Easily add AI-generated fields to your Rails models.
|
4
4
|
|
5
|
+
## Motivation
|
6
|
+
|
7
|
+
ActiveCortex is born out of the need to easily integrate OpenAI into Rails.
|
8
|
+
|
9
|
+
Integrating with OpenAI is kind of a pain. It requires a lot of boilerplate: a
|
10
|
+
service object, dealing with OpenAI errors, defining custom functions, etc.
|
11
|
+
Also, OpenAI is constantly releasing new features, and keeping up-to-date is a
|
12
|
+
hassle.
|
13
|
+
|
14
|
+
Many developers aren't following OpenAI's best practices because they don't
|
15
|
+
have the time to develop the functionality required to follow them. For
|
16
|
+
example, it's a best practice to split complicated tasks into multiple prompts,
|
17
|
+
but some developers will avoid doing that to simplify their implementation.
|
18
|
+
This results in worse outputs. What if you could effortlessly write multi-stage
|
19
|
+
prompts and debug their performance?
|
20
|
+
|
21
|
+
We often write custom functions defining how to create a model we already have
|
22
|
+
a schema for! What if you could tell ChatGPT to provide its response in a
|
23
|
+
format that matches an ActiveRecord class?
|
24
|
+
|
25
|
+
Finally, we have to consider errors. OpenAI has downtime, sometimes returns
|
26
|
+
server errors, and ChatGPT sometimes bugs out and returns "Sorry, I can't help
|
27
|
+
you with that". What if you could remove the error handling logic from your
|
28
|
+
system?
|
29
|
+
|
30
|
+
ActiveCortex cleans up Rails codebases by providing a macro that deals with the
|
31
|
+
interface to OpenAI. (I'm still working on the above features, but that's the
|
32
|
+
vision for this gem!)
|
33
|
+
|
5
34
|
## Usage
|
6
35
|
|
7
36
|
```ruby
|
@@ -13,6 +42,14 @@ class Document < ApplicationRecord
|
|
13
42
|
# (or)
|
14
43
|
ai_generated :summary, prompt: :generate_summary_prompt
|
15
44
|
|
45
|
+
# Generate has_many associations
|
46
|
+
has_many :reviews
|
47
|
+
|
48
|
+
# This will look at the Review class and pass its schema to OpenAI.
|
49
|
+
ai_generated :reviews,
|
50
|
+
prompt: -> (doc) { "Register three reviews for #{doc.title}" },
|
51
|
+
max_results: 3
|
52
|
+
|
16
53
|
private
|
17
54
|
|
18
55
|
def generate_summary_prompt
|
@@ -24,6 +61,9 @@ end
|
|
24
61
|
doc = Document.new(text: "Call me Ishmael...")
|
25
62
|
doc.generate_summary!
|
26
63
|
doc.summary # => an AI-generated summary of `text`
|
64
|
+
|
65
|
+
doc.generate_reviews!
|
66
|
+
doc.reviews # => [#<Review id: nil content: "Wonderful! The way...", rating: 5>, ...]
|
27
67
|
```
|
28
68
|
|
29
69
|
## Installation
|
@@ -48,7 +88,7 @@ And set an OpenAI key
|
|
48
88
|
|
49
89
|
```ruby
|
50
90
|
# config/initializers/active_cortex.rb
|
51
|
-
ActiveCortex.config.
|
91
|
+
ActiveCortex.config.openai_access_token = ENV.fetch("OPENAI_ACCESS_TOKEN")
|
52
92
|
```
|
53
93
|
|
54
94
|
## Contributing
|
@@ -0,0 +1,90 @@
|
|
1
|
+
class ActiveCortex::Generator::HasMany < ActiveCortex::Generator
|
2
|
+
def self.accepts?(record:, field_name:)
|
3
|
+
record.class.reflect_on_association(field_name)&.collection?
|
4
|
+
end
|
5
|
+
|
6
|
+
def save_generation
|
7
|
+
record.send(field_name).push(generation)
|
8
|
+
end
|
9
|
+
|
10
|
+
def generation
|
11
|
+
generate_tool_calls.map do |tool_call|
|
12
|
+
build_record_from_tool_call(tool_call)
|
13
|
+
end
|
14
|
+
end
|
15
|
+
|
16
|
+
private
|
17
|
+
|
18
|
+
def klass
|
19
|
+
record.class.reflect_on_association(field_name).klass
|
20
|
+
end
|
21
|
+
|
22
|
+
def klass_attributes_for_tool_call
|
23
|
+
klass.attribute_types
|
24
|
+
.reject { |name, _| ["id", "created_at", "updated_at"].include?(name) }
|
25
|
+
.reject { |name, _| name.end_with?("_id") }
|
26
|
+
.map { |name, type| [name, { type: type.type }] }
|
27
|
+
.to_h
|
28
|
+
end
|
29
|
+
|
30
|
+
def build_messages_from_tool_calls(tool_calls)
|
31
|
+
[{ role: "user", content: prompt }] + tool_calls.map do |tool_call|
|
32
|
+
[{
|
33
|
+
role: "assistant",
|
34
|
+
content: nil,
|
35
|
+
tool_calls: [tool_call]
|
36
|
+
},{
|
37
|
+
tool_call_id: tool_call["id"],
|
38
|
+
role: "tool",
|
39
|
+
name: tool_call["function"]["name"],
|
40
|
+
content: "OK"
|
41
|
+
}]
|
42
|
+
end.flatten
|
43
|
+
end
|
44
|
+
|
45
|
+
def schema_for_register_function
|
46
|
+
{
|
47
|
+
type: "function",
|
48
|
+
function: {
|
49
|
+
description: "Register a #{klass.name.singularize}", # TODO: test
|
50
|
+
name: "register_#{klass.name.singularize.underscore}", # TODO: test
|
51
|
+
parameters: {
|
52
|
+
type: "object",
|
53
|
+
properties: klass_attributes_for_tool_call
|
54
|
+
}
|
55
|
+
}
|
56
|
+
}
|
57
|
+
end
|
58
|
+
|
59
|
+
def build_record_from_tool_call(tool_call)
|
60
|
+
attrs_json = tool_call["function"]["arguments"]
|
61
|
+
attrs = JSON.parse(attrs_json)
|
62
|
+
|
63
|
+
if attrs.is_a?(Array)
|
64
|
+
attrs.map { |a| klass.new(a) }
|
65
|
+
else
|
66
|
+
klass.new(attrs)
|
67
|
+
end
|
68
|
+
end
|
69
|
+
|
70
|
+
def generate_tool_calls(tool_calls=[])
|
71
|
+
res = openai_client.chat(parameters: {
|
72
|
+
model: model,
|
73
|
+
messages: build_messages_from_tool_calls(tool_calls),
|
74
|
+
tools: [schema_for_register_function]
|
75
|
+
})
|
76
|
+
|
77
|
+
raise ActiveCortex::Error, res["error"] if res["error"]
|
78
|
+
|
79
|
+
added_tool_calls = res["choices"][0]["message"]["tool_calls"]
|
80
|
+
return tool_calls if added_tool_calls.blank?
|
81
|
+
|
82
|
+
tool_calls += added_tool_calls
|
83
|
+
|
84
|
+
if max_results && tool_calls.count >= max_results
|
85
|
+
tool_calls
|
86
|
+
else
|
87
|
+
generate_tool_calls(tool_calls)
|
88
|
+
end
|
89
|
+
end
|
90
|
+
end
|
@@ -0,0 +1,34 @@
|
|
1
|
+
class ActiveCortex::Generator::Text < ActiveCortex::Generator
|
2
|
+
def self.accepts?(record:, field_name:)
|
3
|
+
record.class.attribute_types[field_name.to_s].type == :string
|
4
|
+
end
|
5
|
+
|
6
|
+
def save_generation
|
7
|
+
record.send("#{field_name}=", generation)
|
8
|
+
end
|
9
|
+
|
10
|
+
def generation
|
11
|
+
openai_content || raise(ActiveCortex::Error, openai_error_message)
|
12
|
+
end
|
13
|
+
|
14
|
+
private
|
15
|
+
|
16
|
+
def openai_content
|
17
|
+
openai_response["choices"][0]["message"]["content"]
|
18
|
+
rescue
|
19
|
+
nil
|
20
|
+
end
|
21
|
+
|
22
|
+
def openai_error_message
|
23
|
+
"Error from OpenAI. " + { response: openai_response }.to_json
|
24
|
+
end
|
25
|
+
|
26
|
+
def openai_response
|
27
|
+
@openai_response ||= openai_client.chat(parameters: {
|
28
|
+
model: model,
|
29
|
+
messages: [
|
30
|
+
{ role: "user", content: prompt }
|
31
|
+
],
|
32
|
+
})
|
33
|
+
end
|
34
|
+
end
|
@@ -0,0 +1,59 @@
|
|
1
|
+
class ActiveCortex::Generator
|
2
|
+
# This is a factory method that returns an instance of a subclass of
|
3
|
+
# ActiveCortex::Generator. The subclass is chosen based on the type of the
|
4
|
+
# field that is being generated: text or has_many.
|
5
|
+
#
|
6
|
+
# The subclass is responsible for generating the result and saving it to the
|
7
|
+
# database.
|
8
|
+
|
9
|
+
def self.generate(**)
|
10
|
+
find_generator_class(**).new(**).save_generation
|
11
|
+
end
|
12
|
+
|
13
|
+
attr_reader :record, :field_name, :prompt, :max_results, :model
|
14
|
+
|
15
|
+
def initialize(record:, field_name:, prompt:, max_results:, model:)
|
16
|
+
@record = record
|
17
|
+
@field_name = field_name
|
18
|
+
@prompt = prompt
|
19
|
+
@max_results = max_results
|
20
|
+
@model = model
|
21
|
+
|
22
|
+
raise ArgumentError, "Invalid model provided must be " \
|
23
|
+
"e.g. 'gpt-3.5-turbo', was #{model.inspect}" unless valid_model?
|
24
|
+
end
|
25
|
+
|
26
|
+
def generation
|
27
|
+
raise NotImplementedError
|
28
|
+
end
|
29
|
+
|
30
|
+
def save_generation
|
31
|
+
raise NotImplementedError
|
32
|
+
end
|
33
|
+
|
34
|
+
private
|
35
|
+
|
36
|
+
def self.find_generator_class(record:, field_name:, **)
|
37
|
+
subclasses.find do |subclass|
|
38
|
+
subclass.accepts?(record:, field_name:)
|
39
|
+
end or raise(ActiveCortex::Error, "No generator found for '#{field_name}'")
|
40
|
+
end
|
41
|
+
|
42
|
+
def prompt
|
43
|
+
case @prompt
|
44
|
+
when Symbol then @record.send(@prompt)
|
45
|
+
when Proc then @prompt.call(@record)
|
46
|
+
else
|
47
|
+
raise ActiveCortex::Error,
|
48
|
+
"Prompt must be a symbol or a proc, got #{@prompt.inspect}"
|
49
|
+
end
|
50
|
+
end
|
51
|
+
|
52
|
+
def openai_client
|
53
|
+
@openai_client ||= OpenAI::Client.new(access_token: ActiveCortex.config.openai_access_token)
|
54
|
+
end
|
55
|
+
|
56
|
+
def valid_model?
|
57
|
+
model.present? && model.is_a?(String)
|
58
|
+
end
|
59
|
+
end
|
data/lib/active_cortex/model.rb
CHANGED
@@ -3,37 +3,59 @@ require "openai"
|
|
3
3
|
module ActiveCortex::Model
|
4
4
|
extend ActiveSupport::Concern
|
5
5
|
|
6
|
+
DEFAULT_MODEL = "gpt-3.5-turbo"
|
7
|
+
|
6
8
|
class_methods do
|
7
|
-
|
8
|
-
|
9
|
-
|
9
|
+
# Macro to add methods to a model to generate content for a field.
|
10
|
+
#
|
11
|
+
# For has_many associations, the macro will generate a method that appends an array of
|
12
|
+
# generated objects to the association.
|
13
|
+
#
|
14
|
+
# Example:
|
15
|
+
#
|
16
|
+
# class Post < ApplicationRecord
|
17
|
+
# ai_generated :title, prompt: -> (post) { "Write a title for a post about #{post.topic}" }
|
18
|
+
# end
|
19
|
+
#
|
20
|
+
# post = Post.new(topic: "cats")
|
21
|
+
# post.generate_title!
|
22
|
+
# post.title # => "Cats are the best"
|
23
|
+
#
|
24
|
+
# Example with has_many association:
|
25
|
+
#
|
26
|
+
# class Post < ApplicationRecord
|
27
|
+
# has_many :comments
|
28
|
+
# ai_generated :comments,
|
29
|
+
# prompt: -> (post) { "Register a comment on #{post.title}" },
|
30
|
+
# max_results: 3
|
31
|
+
# end
|
32
|
+
#
|
33
|
+
# post = Post.new(title: "Cats are the best")
|
34
|
+
# post.generate_comments!
|
35
|
+
# post.comments # => [#<Comment id: 1, content: "I love cats">, ...]
|
36
|
+
#
|
37
|
+
# Options:
|
38
|
+
#
|
39
|
+
# * prompt: a symbol or a proc that returns a string to use as the prompt
|
40
|
+
# * model: the ChatGPT model to use for generating content
|
41
|
+
# * max_results: for has_many associations, the maximum number of results to generate
|
42
|
+
def ai_generated(field_name, prompt:, max_results: nil, model: DEFAULT_MODEL)
|
43
|
+
validate_arguments!(field_name, prompt, max_results, model)
|
44
|
+
|
45
|
+
define_method "generate_#{field_name}!" do
|
46
|
+
ActiveCortex::Generator.generate(
|
47
|
+
record: self, field_name:, prompt:, max_results:, model:
|
48
|
+
)
|
10
49
|
end
|
11
50
|
end
|
12
|
-
end
|
13
51
|
|
14
|
-
|
52
|
+
private
|
15
53
|
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
end
|
23
|
-
|
24
|
-
query_chatgpt_with(content)
|
25
|
-
rescue => e
|
26
|
-
raise ActiveCortex::Error, e.message
|
27
|
-
end
|
28
|
-
|
29
|
-
def query_chatgpt_with(content)
|
30
|
-
openai_client.chat(parameters: {
|
31
|
-
model: "gpt-3.5-turbo",
|
32
|
-
messages: [{ role: "user", content: content }],
|
33
|
-
})["choices"][0]["message"]["content"]
|
34
|
-
end
|
35
|
-
|
36
|
-
def openai_client
|
37
|
-
@openai_client ||= OpenAI::Client.new(access_token: ActiveCortex.config.openai_access_token)
|
54
|
+
def validate_arguments!(field_name, prompt, max_results, model)
|
55
|
+
raise ArgumentError, "field_name must be a symbol or string" unless field_name.is_a?(Symbol) || field_name.is_a?(String)
|
56
|
+
raise ArgumentError, "prompt must be a proc" unless prompt.is_a?(Proc)
|
57
|
+
raise ArgumentError, "max_results must be a number" unless max_results.nil? || max_results.is_a?(Integer)
|
58
|
+
raise ArgumentError, "model must be a string" unless model.is_a?(String)
|
59
|
+
end
|
38
60
|
end
|
39
61
|
end
|
data/lib/active_cortex.rb
CHANGED
@@ -1,6 +1,9 @@
|
|
1
1
|
require "active_cortex/version"
|
2
2
|
require "active_cortex/config"
|
3
3
|
require "active_cortex/railtie"
|
4
|
+
require "active_cortex/generator"
|
5
|
+
require "active_cortex/generator/text"
|
6
|
+
require "active_cortex/generator/has_many"
|
4
7
|
require "active_cortex/model"
|
5
8
|
|
6
9
|
module ActiveCortex
|
metadata
CHANGED
@@ -1,55 +1,55 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: active_cortex
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 0.2.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Ori Marash
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2023-
|
11
|
+
date: 2023-11-15 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: rails
|
15
15
|
requirement: !ruby/object:Gem::Requirement
|
16
16
|
requirements:
|
17
|
-
- - "
|
17
|
+
- - "~>"
|
18
18
|
- !ruby/object:Gem::Version
|
19
19
|
version: 7.0.8
|
20
20
|
type: :runtime
|
21
21
|
prerelease: false
|
22
22
|
version_requirements: !ruby/object:Gem::Requirement
|
23
23
|
requirements:
|
24
|
-
- - "
|
24
|
+
- - "~>"
|
25
25
|
- !ruby/object:Gem::Version
|
26
26
|
version: 7.0.8
|
27
27
|
- !ruby/object:Gem::Dependency
|
28
28
|
name: dry-configurable
|
29
29
|
requirement: !ruby/object:Gem::Requirement
|
30
30
|
requirements:
|
31
|
-
- - "
|
31
|
+
- - "~>"
|
32
32
|
- !ruby/object:Gem::Version
|
33
33
|
version: '1.0'
|
34
34
|
type: :runtime
|
35
35
|
prerelease: false
|
36
36
|
version_requirements: !ruby/object:Gem::Requirement
|
37
37
|
requirements:
|
38
|
-
- - "
|
38
|
+
- - "~>"
|
39
39
|
- !ruby/object:Gem::Version
|
40
40
|
version: '1.0'
|
41
41
|
- !ruby/object:Gem::Dependency
|
42
42
|
name: ruby-openai
|
43
43
|
requirement: !ruby/object:Gem::Requirement
|
44
44
|
requirements:
|
45
|
-
- - "
|
45
|
+
- - "~>"
|
46
46
|
- !ruby/object:Gem::Version
|
47
47
|
version: '5.1'
|
48
48
|
type: :runtime
|
49
49
|
prerelease: false
|
50
50
|
version_requirements: !ruby/object:Gem::Requirement
|
51
51
|
requirements:
|
52
|
-
- - "
|
52
|
+
- - "~>"
|
53
53
|
- !ruby/object:Gem::Version
|
54
54
|
version: '5.1'
|
55
55
|
description: Easily add AI-generated fields to your Rails models.
|
@@ -64,6 +64,9 @@ files:
|
|
64
64
|
- Rakefile
|
65
65
|
- lib/active_cortex.rb
|
66
66
|
- lib/active_cortex/config.rb
|
67
|
+
- lib/active_cortex/generator.rb
|
68
|
+
- lib/active_cortex/generator/has_many.rb
|
69
|
+
- lib/active_cortex/generator/text.rb
|
67
70
|
- lib/active_cortex/model.rb
|
68
71
|
- lib/active_cortex/railtie.rb
|
69
72
|
- lib/active_cortex/version.rb
|
@@ -89,7 +92,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
89
92
|
- !ruby/object:Gem::Version
|
90
93
|
version: '0'
|
91
94
|
requirements: []
|
92
|
-
rubygems_version: 3.
|
95
|
+
rubygems_version: 3.4.10
|
93
96
|
signing_key:
|
94
97
|
specification_version: 4
|
95
98
|
summary: Easily add AI-generated fields to your Rails models.
|