ai-engine 0.0.1 → 0.3.0
- checksums.yaml +4 -4
- data/README.md +55 -7
- data/Rakefile +7 -1
- data/app/models/ai/engine/application_record.rb +7 -0
- data/app/models/ai/engine/assistant.rb +15 -0
- data/app/models/ai/engine/assistant_thread.rb +32 -0
- data/app/models/ai/engine/chat.rb +45 -0
- data/app/models/ai/engine/message.rb +73 -0
- data/app/models/ai/engine/run.rb +58 -0
- data/app/models/concerns/ai/engine/assistable.rb +48 -0
- data/app/models/concerns/ai/engine/chattable.rb +23 -0
- data/app/models/concerns/ai/engine/remote_id_validatable.rb +32 -0
- data/app/models/concerns/ai/engine/threadable.rb +23 -0
- data/app/services/ai/engine/openai/assistants/create.rb +25 -0
- data/app/services/ai/engine/openai/assistants/retrieve.rb +15 -0
- data/app/services/ai/engine/openai/assistants/update.rb +25 -0
- data/app/services/ai/engine/openai/chats/stream.rb +24 -0
- data/app/services/ai/engine/openai/messages/create.rb +23 -0
- data/app/services/ai/engine/openai/runs/create.rb +21 -0
- data/app/services/ai/engine/openai/runs/retrieve.rb +15 -0
- data/app/services/ai/engine/openai/threads/create.rb +17 -0
- data/config/initializers/inflections.rb +4 -0
- data/config/initializers/openai.rb +1 -0
- data/db/migrate/20240528153439_create_ai_engine_assistants.rb +12 -0
- data/db/migrate/20240530121855_create_ai_engine_assistant_threads.rb +10 -0
- data/db/migrate/20240530130834_create_ai_engine_runs.rb +11 -0
- data/db/migrate/20240530135629_create_ai_engine_messages.rb +16 -0
- data/db/migrate/20240619150010_create_ai_engine_chats.rb +9 -0
- data/lib/ai/engine/engine.rb +58 -0
- data/lib/ai/engine/version.rb +2 -2
- data/lib/ai/engine.rb +2 -1
- metadata +42 -35
- data/.gitignore +0 -9
- data/Gemfile +0 -4
- data/LICENSE.txt +0 -21
- data/ai-engine.gemspec +0 -37
- data/bin/console +0 -14
- data/bin/setup +0 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 243780cd32368a63bf16fef0f89a122c2877d338fa196c885925aee25161e060
+  data.tar.gz: a41343a78d3bfe8098c4fa8b169dbc85c789351ba710f16da927ca7b609b654b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ab48e0f806a2b4171e16b567c819c6d759cd453b525bc9d4152b689ab195922fc7eafc691e222cad13f05682226a5e8a9e836c11da610062378cf24478116acf
+  data.tar.gz: 25826ebd894a27f9b0599e2fde6586acc76bc496de3ec9a98d0f1a3178fac780ac11358dbe9735dcd0e9d892aa27bc22199eaed3713c744a1fae7f693f5619b6
data/README.md
CHANGED
@@ -1,13 +1,61 @@
-#
+# AI::Engine
 
-
+An experimental, easy-ish way to add AI assistants to your Rails app!
 
+## Usage
+
+You can add AI::Engine to your Gemfile like this:
+
+```
+gem "ai-engine", "~> 0.3.0"
+```
+
+You then need to add the migrations for the gem:
+
+```
+bundle exec rails ai_engine:install:migrations
 ```
-
-
-
+
+And run them:
+
+```
+bundle exec rails db:migrate
+```
+
+Full usage documentation can be found at [RailsAI.com](https://railsai.com/docs/installation).
+
+## Engine Development
+
+### Test local version in a Rails app
+
+```bash
+gem "ai-engine", path: "../ai-engine"
 ```
 
-
+### ENV
+
+The dummy app needs a .env file in the root of the engine for manual and RSpec testing - see .env.example.
+
+### Dummy app
+
+Run the dummy app from the root of the project with `bin/dev` in one tab and `bin/rails s` in another (so debugger will work).
+
+### Tests
+
+Run the tests from the root of the project with `rspec`.
+
+### VCR
+
+AI::Engine uses VCR to record HTTP requests and responses. By default, specs are run against recorded 'cassette' fixtures.
+
+Set OPENAI_ACCESS_TOKEN= in your .env file to run the specs against a live API and re-record all cassettes - this will cost you money!
+
+### Release
+
+First run the specs without VCR so they actually hit the API. This will cost 2 cents or more. Set OPENAI_ACCESS_TOKEN in your environment or pass it in like this:
+
+```
+OPENAI_ACCESS_TOKEN=123abc bundle exec rspec
+```
 
-
+Then update the version number in `version.rb`, update `CHANGELOG.md`, run `bundle install` to update Gemfile.lock, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
data/app/models/ai/engine/assistant.rb
CHANGED
@@ -0,0 +1,15 @@
+module AI::Engine
+  class Assistant < ApplicationRecord
+    include RemoteIdValidatable
+
+    MIN_PROMPT_TOKENS = 256
+    MIN_COMPLETION_TOKENS = 16
+
+    belongs_to :assistable, polymorphic: true
+    has_many :runs, class_name: "AI::Engine::Run", foreign_key: "ai_engine_assistant_id", dependent: :nullify
+
+    def to_partial_path
+      "assistants/assistant"
+    end
+  end
+end
data/app/models/ai/engine/assistant_thread.rb
CHANGED
@@ -0,0 +1,32 @@
+module AI::Engine
+  class AssistantThread < ApplicationRecord
+    include RemoteIdValidatable
+
+    belongs_to :threadable, polymorphic: true
+    has_many :runs, class_name: "AI::Engine::Run", foreign_key: "ai_engine_assistant_thread_id", dependent: :nullify
+    has_many :messages, as: :messageable, class_name: "AI::Engine::Message", foreign_key: "messageable_id", dependent: :nullify
+
+    before_create :create_openai_thread
+
+    def run(assistant_id:, content:)
+      # Create the request Message, locally and remotely on OpenAI.
+      AI::Engine::Message.create(messageable: self, content: content, role: "user")
+
+      # Run the Thread using the selected Assistant.
+      runs.create(ai_engine_assistant_id: assistant_id)
+    end
+
+    def to_partial_path
+      "assistant_threads/assistant_thread"
+    end
+
+    private
+
+    def create_openai_thread
+      self.remote_id = AI::Engine::OpenAI::Threads::Create.call
+    rescue Faraday::Error => e
+      errors.add(:base, e.message)
+      throw(:abort)
+    end
+  end
+end
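For orientation, a minimal sketch of driving an AssistantThread from a host app; `user` (a Threadable record) and `storyteller` (an Assistable record whose remote Assistant already exists) are hypothetical names, not part of the gem:

```ruby
# Creating the record also creates the remote OpenAI thread via the
# before_create callback above.
thread = AI::Engine::AssistantThread.create(threadable: user)

# Stores a local "user" Message, mirrors it to OpenAI, then creates a Run
# against the chosen Assistant.
thread.run(assistant_id: storyteller.assistant.id, content: "Tell me a story about a brave robot.")
```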
data/app/models/ai/engine/chat.rb
CHANGED
@@ -0,0 +1,45 @@
+module AI::Engine
+  class Chat < ApplicationRecord
+    belongs_to :chattable, polymorphic: true
+    has_many :messages, as: :messageable, class_name: "AI::Engine::Message", foreign_key: "messageable_id", dependent: :nullify
+
+    def run(model:)
+      # Run the Chat, sending the complete message history to OpenAI.
+      AI::Engine::OpenAI::Chats::Stream.call(chat_id: id, stream: stream(model: model), model: model)
+    end
+
+    def messages_for_openai
+      messages.order(:created_at).map do |message|
+        {
+          role: message.role,
+          content: message.content
+        }
+      end.filter { |message| message[:content].present? }
+    end
+
+    def stream(model:)
+      response_message = messages.create(
+        role: "assistant",
+        content: "",
+        model: model
+      )
+
+      proc do |chunk, _bytesize|
+        if chunk["object"] == "chat.completion.chunk"
+          new_content = chunk.dig("choices", 0, "delta", "content")
+          response_message.update(content: response_message.content + new_content) if new_content
+        end
+        if chunk["usage"]
+          response_message.update(
+            prompt_token_usage: chunk.dig("usage", "prompt_tokens"),
+            completion_token_usage: chunk.dig("usage", "completion_tokens")
+          )
+        end
+      end
+    end
+
+    def to_partial_path
+      "chats/chat"
+    end
+  end
+end
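A minimal sketch of how a host app might drive a Chat, assuming a `user` record that includes AI::Engine::Chattable; the model name is one of the gem's MODEL_OPTIONS:

```ruby
chat = AI::Engine::Chat.create(chattable: user)
chat.messages.create(role: "user", content: "What's a good name for a Rails AI gem?")

# Streams the completion: Chat#stream creates an empty "assistant" Message and
# returns a proc that appends each delta and records token usage as chunks arrive.
chat.run(model: "gpt-4o")
```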
data/app/models/ai/engine/message.rb
CHANGED
@@ -0,0 +1,73 @@
+module AI::Engine
+  class Message < ApplicationRecord
+    include RemoteIdValidatable
+
+    belongs_to :messageable, polymorphic: true # AI::Engine::Chat or AI::Engine::AssistantThread
+    belongs_to :run, class_name: "AI::Engine::Run", foreign_key: "ai_engine_run_id", optional: true
+
+    enum role: {system: 0, assistant: 10, user: 20}
+
+    before_create :create_openai_message,
+      if: -> { in_assistant_thread? }, # Chat messages are only stored locally.
+      unless: -> { assistant? } # Checking the role - assistant messages on the OpenAI side are created by a Run.
+    after_create :on_create
+    after_update :on_update
+
+    def input_cost
+      return unless assistant?
+
+      return unless prompt_token_usage.present? && model.present?
+
+      (prompt_token_usage.to_i * AI::Engine::DOLLAR_COST_PER_1K_TOKENS[model]["input"] / 1000).round(4)
+    end
+
+    def output_cost
+      return unless assistant?
+
+      return unless completion_token_usage.present? && model.present?
+
+      (completion_token_usage.to_i * AI::Engine::DOLLAR_COST_PER_1K_TOKENS[model]["output"] / 1000).round(4)
+    end
+
+    def user
+      in_chat? ? messageable.chattable : messageable.threadable
+    end
+
+    def in_chat?
+      messageable.is_a?(AI::Engine::Chat)
+    end
+
+    def in_assistant_thread?
+      messageable.is_a?(AI::Engine::AssistantThread)
+    end
+
+    def on_create
+      if in_chat?
+        messageable.chattable.ai_engine_on_message_create(message: self)
+      else
+        messageable.threadable.ai_engine_on_message_create(message: self)
+      end
+    end
+
+    def on_update
+      if in_chat?
+        messageable.chattable.ai_engine_on_message_update(message: self)
+      else
+        messageable.threadable.ai_engine_on_message_update(message: self)
+      end
+    end
+
+    def to_partial_path
+      "messages/message"
+    end
+
+    private
+
+    def create_openai_message
+      self.remote_id = AI::Engine::OpenAI::Messages::Create.call(thread_id: messageable.remote_id, content: content, role: role)
+    rescue Faraday::Error => e
+      errors.add(:base, e.message)
+      throw(:abort)
+    end
+  end
+end
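A worked example of the cost helpers above, using the gpt-4o rates defined in AI::Engine::DOLLAR_COST_PER_1K_TOKENS; the token counts are made up for illustration:

```ruby
message = AI::Engine::Message.new(
  role: "assistant",
  model: "gpt-4o",
  prompt_token_usage: 1_200,
  completion_token_usage: 300
)

message.input_cost  # => (1200 * 0.005 / 1000).round(4) == 0.006
message.output_cost # => (300 * 0.015 / 1000).round(4)  == 0.0045
```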
data/app/models/ai/engine/run.rb
CHANGED
@@ -0,0 +1,58 @@
+module AI::Engine
+  class Run < ApplicationRecord
+    include AI::Engine::RemoteIdValidatable
+
+    belongs_to :assistant, class_name: "AI::Engine::Assistant", foreign_key: "ai_engine_assistant_id"
+    belongs_to :assistant_thread, class_name: "AI::Engine::AssistantThread", foreign_key: "ai_engine_assistant_thread_id"
+    has_many :messages, class_name: "AI::Engine::Message", foreign_key: "ai_engine_run_id", dependent: :nullify
+
+    after_create :create_openai_run
+
+    def to_partial_path
+      "runs/run"
+    end
+
+    private
+
+    def create_openai_run
+      AI::Engine::OpenAI::Runs::Create.call(
+        assistant_id: assistant.remote_id,
+        thread_id: assistant_thread.remote_id,
+        stream: stream
+      )
+    rescue Faraday::Error => e
+      errors.add(:base, e.message)
+      throw(:abort)
+    end
+
+    def stream
+      response_message = assistant_thread.messages.create(
+        ai_engine_run_id: id,
+        model: assistant.assistable.model,
+        role: "assistant",
+        content: ""
+      )
+      proc do |chunk, _bytesize|
+        if chunk["object"] == "thread.message.delta"
+          new_content = chunk.dig("delta", "content", 0, "text", "value")
+          response_message.update(content: response_message.content + new_content) if new_content
+        elsif chunk["status"] == "completed"
+          if chunk["run_id"].present? && !remote_id.present?
+            update(remote_id: chunk["run_id"])
+            remote_run = AI::Engine::OpenAI::Runs::Retrieve.call(remote_id: chunk["run_id"], thread_id: assistant_thread.remote_id)
+            if remote_run.present?
+              response_message.update(
+                prompt_token_usage: remote_run.dig("usage", "prompt_tokens"),
+                completion_token_usage: remote_run.dig("usage", "completion_tokens")
+              )
+            end
+          end
+
+          if chunk["id"].present? && chunk["id"].start_with?(AI::Engine::Message.remote_id_prefix) && !response_message.remote_id.present?
+            response_message.update(remote_id: chunk["id"])
+          end
+        end
+      end
+    end
+  end
+end
data/app/models/concerns/ai/engine/assistable.rb
CHANGED
@@ -0,0 +1,48 @@
+module AI
+  module Engine
+    module Assistable
+      extend ActiveSupport::Concern
+
+      included do
+        has_one :assistant, as: :assistable, class_name: "AI::Engine::Assistant"
+
+        before_create :create_openai_assistant
+        before_update :update_openai_assistant
+
+        # Default. Override in including model to customize.
+        def ai_engine_assistant
+          Logger.new($stdout).info("ai_engine_assistant called - add `def ai_engine_assistant` to your #{self.class.name} model to define the Assistant params. The method should return a Hash of: {name:, model:, description:, instructions:}.")
+          {
+            name: "Assistant for #{self.class.name} #{id}",
+            model: AI::Engine::MODEL_OPTIONS.first,
+            description: "Assistant for #{self.class.name} #{id}",
+            instructions: "Assistant for #{self.class.name} #{id}"
+          }
+        end
+
+        def ai_engine_run(assistant_thread:, content:)
+          assistant_thread.run(assistant_id: assistant.id, content: content)
+        end
+
+        private
+
+        def create_openai_assistant
+          build_assistant
+          begin
+            assistant.remote_id = AI::Engine::OpenAI::Assistants::Create.call(**ai_engine_assistant)
+          rescue Faraday::Error => e
+            errors.add(:base, e.message)
+            throw(:abort)
+          end
+        end
+
+        def update_openai_assistant
+          AI::Engine::OpenAI::Assistants::Update.call(remote_id: assistant&.remote_id, **ai_engine_assistant)
+        rescue Faraday::Error => e
+          errors.add(:base, e.message)
+          throw(:abort)
+        end
+      end
+    end
+  end
+end
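A minimal sketch of an Assistable host model; the `Storyteller` class and its `name`, `model` and `instructions` columns are assumptions for illustration:

```ruby
class Storyteller < ApplicationRecord
  include AI::Engine::Assistable

  # Overrides the concern's default to control the remote Assistant's params.
  def ai_engine_assistant
    {
      name: name,
      model: model, # e.g. "gpt-4o"
      description: "Tells stories on demand.",
      instructions: instructions
    }
  end
end
```

The `before_create`/`before_update` callbacks in the concern then create or modify the OpenAI Assistant whenever a Storyteller is saved.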
data/app/models/concerns/ai/engine/chattable.rb
CHANGED
@@ -0,0 +1,23 @@
+module AI
+  module Engine
+    module Chattable
+      extend ActiveSupport::Concern
+
+      included do
+        has_many :chats, as: :chattable, class_name: "AI::Engine::Chat"
+
+        def ai_engine_on_message_create(message:)
+          # This is a hook for the AI Engine to notify the Chattable that a Message has been updated.
+          # Override this method in your Chattable model to handle the event.
+          Logger.new($stdout).info("ai_engine_on_message_create called - add `def ai_engine_on_message_create(message:)` to your Chattable model to handle this event.")
+        end
+
+        def ai_engine_on_message_update(message:)
+          # This is a hook for the AI Engine to notify the Chattable that a Message has been updated.
+          # Override this method in your Chattable model to handle the event.
+          Logger.new($stdout).info("ai_engine_on_message_update called - add `def ai_engine_on_message_update(message:)` to your Chattable model to handle this event.")
+        end
+      end
+    end
+  end
+end
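A minimal sketch of a Chattable host model; the `User` class and the logging bodies are illustrative only:

```ruby
class User < ApplicationRecord
  include AI::Engine::Chattable

  # Called after each AI::Engine::Message is created for one of this user's chats.
  def ai_engine_on_message_create(message:)
    Rails.logger.info("Chat #{message.messageable_id}: new #{message.role} message")
  end

  # Called on every update, i.e. for each streamed content delta.
  def ai_engine_on_message_update(message:)
    Rails.logger.info("Chat #{message.messageable_id}: message #{message.id} now #{message.content.length} chars")
  end
end
```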
data/app/models/concerns/ai/engine/remote_id_validatable.rb
CHANGED
@@ -0,0 +1,32 @@
+module AI
+  module Engine
+    module RemoteIdValidatable
+      extend ActiveSupport::Concern
+
+      REMOTE_ID_PREFIXES = {
+        run: "run_",
+        message: "msg_",
+        assistant: "asst_",
+        assistant_thread: "thread_"
+      }.freeze
+
+      included do
+        # Validate the remote_id format is correct.
+        validate :remote_id_format
+
+        def remote_id_format
+          prefix = self.class.remote_id_prefix
+          return if remote_id.nil? || remote_id.start_with?(prefix)
+
+          errors.add(:remote_id, "ID '#{remote_id}' must start with '#{prefix}'")
+        end
+      end
+
+      class_methods do
+        def remote_id_prefix
+          REMOTE_ID_PREFIXES[name.demodulize.underscore.to_sym]
+        end
+      end
+    end
+  end
+end
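To illustrate the validation above (a sketch; the remote_id value is made up):

```ruby
AI::Engine::Message.remote_id_prefix
# => "msg_" (REMOTE_ID_PREFIXES[:message])

message = AI::Engine::Message.new(remote_id: "run_abc123")
message.validate
message.errors[:remote_id]
# => ["ID 'run_abc123' must start with 'msg_'"]
```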
data/app/models/concerns/ai/engine/threadable.rb
CHANGED
@@ -0,0 +1,23 @@
+module AI
+  module Engine
+    module Threadable
+      extend ActiveSupport::Concern
+
+      included do
+        has_many :assistant_threads, as: :threadable, class_name: "AI::Engine::AssistantThread"
+
+        def ai_engine_on_message_create(message:)
+          # This is a hook for the AI Engine to notify the AssistantThreadtable that a Message has been updated.
+          # Override this method in your AssistantThreadtable model to handle the event.
+          Logger.new($stdout).info("ai_engine_on_message_create called - add `def ai_engine_on_message_create(message:)` to your Threadable model to handle this event.")
+        end
+
+        def ai_engine_on_message_update(message:)
+          # This is a hook for the AI Engine to notify the AssistantThreadtable that a Message has been updated.
+          # Override this method in your AssistantThreadtable model to handle the event.
+          Logger.new($stdout).info("ai_engine_on_message_update called - add `def ai_engine_on_message_update(message:)` to your Threadable model to handle this event.")
+        end
+      end
+    end
+  end
+end
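A minimal sketch of a Threadable host model (`Member` is a hypothetical class); the hook names mirror Chattable's, so a model that includes both concerns can define each hook once:

```ruby
class Member < ApplicationRecord
  include AI::Engine::Threadable

  def ai_engine_on_message_create(message:)
    # e.g. append the new thread message to the UI
  end

  def ai_engine_on_message_update(message:)
    # e.g. re-render the assistant message as streamed content arrives
  end
end
```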
data/app/services/ai/engine/openai/assistants/create.rb
CHANGED
@@ -0,0 +1,25 @@
+class AI::Engine::OpenAI::Assistants::Create
+  # Creates a new Assistant on the OpenAI API.
+  # Returns the OpenAI ID of the new Assistant.
+  def self.call(name:, model:, description:, instructions:)
+    response = client.assistants.create(
+      parameters: {
+        name: name,
+        model: model,
+        description: description,
+        instructions: instructions
+      }
+    )
+
+    response["id"]
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
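The service objects can also be called directly; this is a sketch of the same call the Assistable concern makes from its create callback (the argument values are examples):

```ruby
remote_id = AI::Engine::OpenAI::Assistants::Create.call(
  name: "Storyteller",
  model: "gpt-4o",
  description: "Tells stories on demand.",
  instructions: "You are a friendly storyteller."
)
# => an OpenAI Assistant ID such as "asst_..."
```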
data/app/services/ai/engine/openai/assistants/retrieve.rb
CHANGED
@@ -0,0 +1,15 @@
+class AI::Engine::OpenAI::Assistants::Retrieve
+  # Retrieves an OpenAI Assistant by its ID.
+  def self.call(remote_id:)
+    client.assistants.retrieve(id: remote_id)
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/app/services/ai/engine/openai/assistants/update.rb
CHANGED
@@ -0,0 +1,25 @@
+class AI::Engine::OpenAI::Assistants::Update
+  # Updates an OpenAI Assistant with the given parameters.
+  def self.call(remote_id:, name: nil, model: nil, description: nil, instructions: nil)
+    parameters = {
+      name: name,
+      model: model,
+      description: description,
+      instructions: instructions
+    }.compact
+
+    client.assistants.modify(
+      id: remote_id,
+      parameters: parameters
+    )
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/app/services/ai/engine/openai/chats/stream.rb
CHANGED
@@ -0,0 +1,24 @@
+class AI::Engine::OpenAI::Chats::Stream
+  # Gets the next message response to a set of messages.
+  def self.call(chat_id:, stream:, model:)
+    chat = AI::Engine::Chat.find(chat_id)
+
+    client.chat(
+      parameters: {
+        model: model,
+        messages: chat.messages_for_openai,
+        stream: stream,
+        stream_options: {include_usage: true}
+      }
+    )
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
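In normal use the `stream:` proc comes from Chat#stream, but any proc with the same `(chunk, bytesize)` signature works; a sketch that just prints content deltas, assuming `chat` is an existing AI::Engine::Chat:

```ruby
printer = proc do |chunk, _bytesize|
  delta = chunk.dig("choices", 0, "delta", "content")
  print delta if delta
end

AI::Engine::OpenAI::Chats::Stream.call(chat_id: chat.id, stream: printer, model: "gpt-4o")
```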
data/app/services/ai/engine/openai/messages/create.rb
CHANGED
@@ -0,0 +1,23 @@
+class AI::Engine::OpenAI::Messages::Create
+  # Gets the OpenAI ID of a new Message.
+  def self.call(thread_id:, content:, role:)
+    response = client.messages.create(
+      thread_id: thread_id,
+      parameters: {
+        content: content,
+        role: role
+      }
+    )
+
+    response["id"]
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/app/services/ai/engine/openai/runs/create.rb
CHANGED
@@ -0,0 +1,21 @@
+class AI::Engine::OpenAI::Runs::Create
+  # Creates a new Run on the OpenAI API.
+  def self.call(assistant_id:, thread_id:, stream: false)
+    client.runs.create(
+      thread_id: thread_id,
+      parameters: {
+        assistant_id: assistant_id,
+        stream: stream
+      }
+    )
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/app/services/ai/engine/openai/runs/retrieve.rb
CHANGED
@@ -0,0 +1,15 @@
+class AI::Engine::OpenAI::Runs::Retrieve
+  # Retrieves an OpenAI Run by its ID.
+  def self.call(remote_id:, thread_id:)
+    client.runs.retrieve(id: remote_id, thread_id: thread_id)
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/app/services/ai/engine/openai/threads/create.rb
CHANGED
@@ -0,0 +1,17 @@
+class AI::Engine::OpenAI::Threads::Create
+  # Gets the OpenAI ID of a new Thread.
+  def self.call
+    response = client.threads.create(parameters: {})
+
+    response["id"]
+  end
+
+  private_class_method def self.client
+    @client ||= OpenAI::Client.new(
+      access_token: AI::Engine::Engine.config.openai_access_token,
+      organization_id: AI::Engine::Engine.config.openai_organization_id,
+      log_errors: Rails.env.development? || Rails.env.test?,
+      request_timeout: 2.minutes.to_i
+    )
+  end
+end
data/config/initializers/openai.rb
CHANGED
@@ -0,0 +1 @@
+require "openai"
data/db/migrate/20240528153439_create_ai_engine_assistants.rb
CHANGED
@@ -0,0 +1,12 @@
+class CreateAIEngineAssistants < ActiveRecord::Migration[7.1]
+  def change
+    create_table :ai_engine_assistants do |t|
+      t.string :remote_id
+      t.belongs_to :assistable, polymorphic: true
+
+      t.timestamps
+    end
+
+    add_index :ai_engine_assistants, %i[assistable_type assistable_id remote_id], unique: true
+  end
+end
data/db/migrate/20240530130834_create_ai_engine_runs.rb
CHANGED
@@ -0,0 +1,11 @@
+class CreateAIEngineRuns < ActiveRecord::Migration[7.1]
+  def change
+    create_table :ai_engine_runs do |t|
+      t.string :remote_id
+      t.references :ai_engine_assistant, foreign_key: true
+      t.references :ai_engine_assistant_thread, foreign_key: true
+
+      t.timestamps
+    end
+  end
+end
data/db/migrate/20240530135629_create_ai_engine_messages.rb
CHANGED
@@ -0,0 +1,16 @@
+class CreateAIEngineMessages < ActiveRecord::Migration[7.1]
+  def change
+    create_table :ai_engine_messages do |t|
+      t.string :remote_id
+      t.references :ai_engine_run, foreign_key: true
+      t.references :messageable, polymorphic: true
+      t.integer :role, null: false, default: 0
+      t.string :content, null: false
+      t.string :model
+      t.integer :prompt_token_usage
+      t.integer :completion_token_usage
+
+      t.timestamps
+    end
+  end
+end
data/lib/ai/engine/engine.rb
CHANGED
@@ -0,0 +1,58 @@
+module AI
+  module Engine
+    DOLLAR_COST_PER_1K_TOKENS = {
+      "gpt-3.5-turbo" => {
+        "input" => 0.0005,
+        "output" => 0.0015
+      },
+      "gpt-4" => {
+        "input" => 0.03,
+        "output" => 0.06
+      },
+      "gpt-4-turbo" => {
+        "input" => 0.01,
+        "output" => 0.03
+      },
+      "gpt-4o" => {
+        "input" => 0.005,
+        "output" => 0.015
+      }
+    }.freeze
+    MODEL_OPTIONS = DOLLAR_COST_PER_1K_TOKENS.keys.freeze
+
+    def self.setup(&)
+      Engine.setup(&)
+    end
+
+    class Engine < ::Rails::Engine
+      isolate_namespace AI::Engine
+
+      class Configuration
+        attr_accessor :openai_access_token, :openai_organization_id
+
+        def initialize
+          @openai_access_token = nil
+          @openai_organization_id = nil
+        end
+      end
+
+      config.generators do |g|
+        g.test_framework :rspec
+        g.fixture_replacement :factory_bot
+        g.factory_bot dir: "spec/factories"
+      end
+
+      def self.setup(&block)
+        @config ||= AI::Engine::Engine::Configuration.new
+
+        yield @config if block
+
+        @config
+      end
+
+      def self.config
+        @config || setup
+      end
+    end
+  end
+end
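A typical host-app initializer for the configuration above; the file path and ENV variable names are conventions assumed here, not required by the engine:

```ruby
# config/initializers/ai_engine.rb
AI::Engine.setup do |config|
  config.openai_access_token = ENV.fetch("OPENAI_ACCESS_TOKEN")
  config.openai_organization_id = ENV.fetch("OPENAI_ORGANIZATION_ID", nil)
end
```

The OpenAI service objects read these values back through `AI::Engine::Engine.config` when building their clients.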
data/lib/ai/engine/version.rb
CHANGED
data/lib/ai/engine.rb
CHANGED
metadata
CHANGED
@@ -1,67 +1,74 @@
 --- !ruby/object:Gem::Specification
 name: ai-engine
 version: !ruby/object:Gem::Version
-  version: 0.0
+  version: 0.3.0
 platform: ruby
 authors:
 - Alex Rudall
 autorequire:
-bindir:
+bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-09-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: ruby-openai
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
-  type: :
+        version: 7.1.0
+  type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version:
-
-  name: rake
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '13.2'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '13.2'
-description: You're probably looking for the real ai-engine gem at https://insertrobot.com
+        version: 7.1.0
+description: A Rails Engine from the creator of ruby-openai.
 email:
--
+- hello@alexrudall.com
 executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- ".gitignore"
-- Gemfile
-- LICENSE.txt
 - README.md
 - Rakefile
-- ai
-
-
+- app/models/ai/engine/application_record.rb
+- app/models/ai/engine/assistant.rb
+- app/models/ai/engine/assistant_thread.rb
+- app/models/ai/engine/chat.rb
+- app/models/ai/engine/message.rb
+- app/models/ai/engine/run.rb
+- app/models/concerns/ai/engine/assistable.rb
+- app/models/concerns/ai/engine/chattable.rb
+- app/models/concerns/ai/engine/remote_id_validatable.rb
+- app/models/concerns/ai/engine/threadable.rb
+- app/services/ai/engine/openai/assistants/create.rb
+- app/services/ai/engine/openai/assistants/retrieve.rb
+- app/services/ai/engine/openai/assistants/update.rb
+- app/services/ai/engine/openai/chats/stream.rb
+- app/services/ai/engine/openai/messages/create.rb
+- app/services/ai/engine/openai/runs/create.rb
+- app/services/ai/engine/openai/runs/retrieve.rb
+- app/services/ai/engine/openai/threads/create.rb
+- config/initializers/inflections.rb
+- config/initializers/openai.rb
+- db/migrate/20240528153439_create_ai_engine_assistants.rb
+- db/migrate/20240530121855_create_ai_engine_assistant_threads.rb
+- db/migrate/20240530130834_create_ai_engine_runs.rb
+- db/migrate/20240530135629_create_ai_engine_messages.rb
+- db/migrate/20240619150010_create_ai_engine_chats.rb
 - lib/ai/engine.rb
+- lib/ai/engine/engine.rb
 - lib/ai/engine/version.rb
-homepage: https://
+homepage: https://railsai.com/docs/installation
 licenses:
 - MIT
 metadata:
-
-
-
+  homepage_uri: https://railsai.com/docs/installation
+  source_code_uri: https://github.com/alexrudall/ai-engine/
+  changelog_uri: https://github.com/alexrudall/ai-engine/blob/main/CHANGELOG.md
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -69,7 +76,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '0'
+      version: '3.0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
@@ -79,5 +86,5 @@ requirements: []
 rubygems_version: 3.5.11
 signing_key:
 specification_version: 4
-summary:
+summary: The easiest way to get AI into your Rails app.
 test_files: []
data/.gitignore
DELETED
data/Gemfile
DELETED
data/LICENSE.txt
DELETED
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2024 Alex Rudall
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
data/ai-engine.gemspec
DELETED
@@ -1,37 +0,0 @@
-# coding: utf-8
-lib = File.expand_path('../lib', __FILE__)
-$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
-require 'ai/engine/version'
-
-Gem::Specification.new do |spec|
-  spec.name = "ai-engine"
-  spec.version = Ai::Engine::VERSION
-  spec.authors = ["Alex Rudall"]
-  spec.email = ["alex@insertrobot.com"]
-
-  spec.summary = %q{Placeholder for the official ai-engine gem}
-  spec.description = %q{You're probably looking for the real ai-engine gem at https://insertrobot.com}
-  spec.homepage = "https://github.com/alexrudall/ai-engine-placeholder"
-  spec.license = "MIT"
-
-  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
-  # to allow pushing to a single host or delete this section to allow pushing to any host.
-  if spec.respond_to?(:metadata)
-    spec.metadata['allowed_push_host'] = "https://rubygems.org"
-  else
-    raise "RubyGems 2.0 or newer is required to protect against " \
-      "public gem pushes."
-  end
-
-  spec.post_install_message = "/!\\ ಠ‿ಠ Watch out ! This is not the real ai-engine gem - get the real one from insertrobot.com! /!\\"
-
-  spec.files = `git ls-files -z`.split("\x0").reject do |f|
-    f.match(%r{^(test|spec|features)/})
-  end
-  spec.bindir = "exe"
-  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
-  spec.require_paths = ["lib"]
-
-  spec.add_development_dependency "bundler", "~> 2.5"
-  spec.add_development_dependency "rake", "~> 13.2"
-end
data/bin/console
DELETED
@@ -1,14 +0,0 @@
-#!/usr/bin/env ruby
-
-require "bundler/setup"
-require "ai/engine"
-
-# You can add fixtures and/or initialization code here to make experimenting
-# with your gem easier. You can also use a different console, if you like.
-
-# (If you use this, don't forget to add pry to your Gemfile!)
-# require "pry"
-# Pry.start
-
-require "irb"
-IRB.start(__FILE__)