activeai 0.1.0 → 0.1.2
- checksums.yaml +4 -4
- data/Gemfile.lock +75 -0
- data/README.md +128 -2
- data/SESSIONS.md +45 -0
- data/activeai.gemspec +2 -5
- data/lib/activeai/behavior/base.rb +4 -0
- data/lib/activeai/behavior/llm/conversation.rb +41 -0
- data/lib/activeai/behavior/llm/follow_structured_examples.rb +34 -0
- data/lib/activeai/behavior/llm/unstructured.rb +3 -0
- data/lib/activeai/behavior/llm.rb +29 -0
- data/lib/activeai/behavior.rb +9 -0
- data/lib/activeai/configuration.rb +7 -0
- data/lib/activeai/controller.rb +46 -0
- data/lib/activeai/neural_network/gpt3.rb +48 -0
- data/lib/activeai/neural_network.rb +4 -0
- data/lib/activeai/router.rb +63 -0
- data/lib/activeai/version.rb +1 -1
- data/lib/activeai.rb +13 -1
- metadata +44 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d5a985f1783335da05fa3165fbe352823a431d206c2586a334eff561874e88d2
+  data.tar.gz: bce08aa0e01e59e780ad8a47fd632e10567ae7c25dd8d88b295f61344d4a044c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: db6b75600a2cf40c9b872c62a64316ffd36c69bd53856b3b05aed9865332450ed54c4b16690c7225ec313fb86d1634a6baade87cec4ab11464e0f9b4fb48be09
+  data.tar.gz: 383a17006039e02b64507bba0c95648bef990a17ce7f2ad676ab21fd0505f62a9eada8c5484e7a170215200a770d58e27c924e1a00be397f382d4c4364f39d59
data/Gemfile.lock
ADDED
@@ -0,0 +1,75 @@
+PATH
+  remote: .
+  specs:
+    activeai (0.1.0)
+      faraday
+      true
+
+GEM
+  remote: https://rubygems.org/
+  specs:
+    ast (2.4.2)
+    diff-lcs (1.5.0)
+    faraday (2.7.2)
+      faraday-net_http (>= 2.0, < 3.1)
+      ruby2_keywords (>= 0.0.4)
+    faraday-net_http (3.0.2)
+    ffi (1.15.5)
+    json (2.6.3)
+    parallel (1.22.1)
+    parser (3.1.3.0)
+      ast (~> 2.4.1)
+    rainbow (3.1.1)
+    rake (13.0.6)
+    rb-fsevent (0.11.2)
+    rb-inotify (0.10.1)
+      ffi (~> 1.0)
+    regexp_parser (2.6.1)
+    rexml (3.2.5)
+    rspec (3.12.0)
+      rspec-core (~> 3.12.0)
+      rspec-expectations (~> 3.12.0)
+      rspec-mocks (~> 3.12.0)
+    rspec-core (3.12.0)
+      rspec-support (~> 3.12.0)
+    rspec-expectations (3.12.1)
+      diff-lcs (>= 1.2.0, < 2.0)
+      rspec-support (~> 3.12.0)
+    rspec-mocks (3.12.1)
+      diff-lcs (>= 1.2.0, < 2.0)
+      rspec-support (~> 3.12.0)
+    rspec-support (3.12.0)
+    rubocop (1.41.1)
+      json (~> 2.3)
+      parallel (~> 1.10)
+      parser (>= 3.1.2.1)
+      rainbow (>= 2.2.2, < 4.0)
+      regexp_parser (>= 1.8, < 3.0)
+      rexml (>= 3.2.5, < 4.0)
+      rubocop-ast (>= 1.23.0, < 2.0)
+      ruby-progressbar (~> 1.7)
+      unicode-display_width (>= 1.4.0, < 3.0)
+    rubocop-ast (1.24.1)
+      parser (>= 3.1.1.0)
+    ruby-progressbar (1.11.0)
+    ruby2_keywords (0.0.5)
+    sass (3.7.4)
+      sass-listen (~> 4.0.0)
+    sass-listen (4.0.0)
+      rb-fsevent (~> 0.9, >= 0.9.4)
+      rb-inotify (~> 0.9, >= 0.9.7)
+    true (2.2.2)
+      sass (~> 3.4)
+    unicode-display_width (2.3.0)
+
+PLATFORMS
+  arm64-darwin-21
+
+DEPENDENCIES
+  activeai!
+  rake (~> 13.0)
+  rspec (~> 3.0)
+  rubocop (~> 1.21)
+
+BUNDLED WITH
+   2.3.20

data/README.md
CHANGED
@@ -2,6 +2,132 @@
 
 ## AI AS COMPUTE
 
-Artificial Intelligence
+Artificial Intelligence the Rails way.
 
-Supported by
+Supported by [gamebreakers community](https://gamebreakers.org) - AI is for everyone <3
+
+# Usage
+
+## L0: Interacting directly with neural networks
+
+### GPT3
+
+```
+gpt3 = ActiveAI::NeuralNetwork::GPT3.new(ENV['OPEN_AI_TOKEN'])
+prompt = "Never gonna give you up, never gonna"
+puts gpt3.complete(prompt: prompt)['choices'].first['text']
+#=> 'let you down, never gonna run around and hurt you.'
+```
+
+### TODO: others
+
+## L1: Using behavior patterns to interact with neural networks
+
+### With structured examples
+
+```
+llm = ActiveAI::NeuralNetwork::GPT3.new(ENV['OPEN_AI_TOKEN'], model: 'text-curie-001')
+behavior = ActiveAI::Behavior::LLM::FollowStructuredExamples.new(llm, {
+  instruction: 'Write a comma-separated list of nouns in the following sentences:',
+  examples: [
+    { sentence: 'I have some veggie burgers in the freezer!', nouns: 'burgers, freezer' }
+    # a couple of examples improves performance!
+  ]
+})
+result = behavior.call({ sentence: 'My tomatoes are in bloom this summer, time for jam!' }, extract: %W[nouns])
+puts result
+#=> 'tomatoes, jam'
+```
+
+### TODO: with other patterns
+
+### TODO: auto-detected behavior pattern from config
+
+## L2: Rails magic for neural networks
+
+**This is the fun part!**
+
+Suppose you have the following files:
+
+### config/routes/bank.yml
+
+```
+instruction:
+  For a given Match request, choose where to send it via the "Route" field and choose the params that fit best.
+  If nothing matches, the "Route" field should be None.
+examples:
+  - Match: Check the weather
+    Route: none
+  - Match: Send R100 to Jomiro
+    Route: bank#transfer_money
+    Params: { beneficiaryId: 12345, amount: 100.0 }
+  - Match: Pay Mom R245 for groceries
+    Route: bank#transfer_money
+    Params: { beneficiaryId: 98765, amount: 245.0, reference: "Groceries <3" }
+  - Match: What's my bank balance?
+    Route: bank#check_balance
+```
+
+### controllers/bank_controller.rb
+
+```
+class BankController < ActiveAI::Controller
+  auto_load_routing # loads routing config from config/routes/bank.yml
+  load_routing(config) # alternatively, loads routing config from a hash
+
+  def check_balance
+    # Make an API request to GET bank.com/balance and return some useful data
+  end
+
+  def transfer_money
+    # Make an API request to POST bank.com/transfer with params and return some useful data
+  end
+end
+```
+
+### How to use it
+
+#### Running a controller directly
+
+Using the routing yml file and an LLM, the controller will turn any text request into an action to run, with parameters to supply, and then execute it.
+
+```ruby
+controller = BankController.new
+controller.call("Pay Mom R127 for groceries")
+# => responds with the result of an action that ran with params
+```
+
+#### Routing an unknown request with multiple controllers
+
+It's possible to instantiate an `ActiveAI::Router`, load up the examples from multiple controllers, and then have it handle many types of requests. It does this in a similar way to how the controller uses an LLM to map to action and params, but it concatenates all controller routing examples and strips out the parameter preparation step for efficiency, since the controller handles this.
+
+```ruby
+router = ActiveAI::Router.new
+
+# load all auto-detected routes:
+router.auto_load_routing(Rails.root.join('config','routes')) # loads all .yml files as controller examples
+
+# or, load config via path or manually from a config hash:
+router.add_controller_routing_from_path(Rails.root.join("config", "routes", "bank.yml"))
+slack_routing = YAML::load(File.read(Rails.root.join("config", "routes", "slack.yml"))
+router.add_controller_routing(slack_routing)
+```
+
+Once the routes are loaded, requests will be passed to a matched controller, if any matches. You can match and run requests like this:
+
+```ruby
+router.call("Send a Slack message saying 'Hey everyone!") # returns the successful action
+router.call("Transfer R5,000 to savings") # returns the successful action
+router.call("Visit grandma") # returns nil
+```
+
+Or if you just want to find the controller:
+
+```ruby
+router.find_controller("Transfer money out of savings")
+# => BankController
+```
+
+# Please help make this better!
+
+This is an experiment to push the boundaries of "AI as compute" and it would be awesome to have more eager explorers to play with!

data/SESSIONS.md
ADDED
@@ -0,0 +1,45 @@
+# 31 December 2022
+
+## What I did
+
+- Fix active_ai/controller.rb to reflect on the correct detected path name
+- Ran a full test with thinkspawn creating DALLE art and sending Slack messages - it's really cool!
+- Released v0.1.1!!
+
+## What I learned
+
+- Gonna need a master router and make the sub-routers just about param prep
+- Curie doesn't do great with routing many controllers. I had to rename e.g. slack.notify to slack.send_message because that seemed to be Curie's default.. DaVinci works better but also some issues i think
+
+## What I could do next
+
+- add whisper as a neural network via replicate.com
+- add embeddings as a concept - not sure what/how yet
+- make better examples for routing, it's a bit iffy right now
+- add a better way in code to test controllers test cases etc.
+- test out if the code models work better than the text ones
+
+# 30 December 2022
+
+## What I did
+
+- Built a basic gem!
+- Added behaviors, specifically for structured trained examples
+- Added rails controllers and routers which use structured trained examples
+- Added a cool prototype conversation/chat structure, and a cool idea on how to make operators ponder!
+
+## What I learned
+
+- I need to learn more about modules, classes and gem structuring
+- Rails Engines is going to be a thing, probably, maybe
+- ActiveAI is a boring name. How about something that's more expansive, welcoming, inclusive, social?
+
+## What I could do next
+
+- Run a real example via thinkspawn and get it all working
+- Make active_ai/behavior.rb#from_config with a sensible default behavior and test a few others
+- Make the code work like the readme says it does for rails stuff (controller naming and folder structure etc.) - might need a Railtie?
+- Publish v0.1.1
+- Update the configuration.rb mechanic a bit
+- Load up all OpenAI's examples as instances of one of a couple of behavior types
+- Add session contexts to the router registration so it's flexible

data/activeai.gemspec
CHANGED
@@ -30,9 +30,6 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
 
-
-
-
-  # For more information and examples about making a new gem, check out our
-  # guide at: https://bundler.io/guides/creating_gem.html
+  spec.add_dependency 'faraday'
+  spec.add_dependency gem 'faraday-multipart'
 end

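Note: `spec.add_dependency gem 'faraday-multipart'` looks like a typo for `spec.add_dependency 'faraday-multipart'`. Since `Kernel#gem` returns `true` after activating an installed gem, the line as written would register a runtime dependency literally named "true" - which would explain the `true (2.2.2)` entry (and its `sass` requirement) in the Gemfile.lock above and the `'true'` dependency in the gem metadata at the bottom of this diff.
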
data/lib/activeai/behavior/llm/conversation.rb
ADDED
@@ -0,0 +1,41 @@
+class ActiveAI::Behavior::LLM::Conversation < ActiveAI::Behavior::LLM
+  # i need alerts if this stuff gets caught in a loop! like pondering->noticing and never stopping or something
+
+  def initialize(llm, state)
+    super(llm)
+    @state = state
+    # TODO raise errors if not expected thingies available in the config
+    @state['conversation'] ||= ""
+  end
+
+  def history
+    @state['conversation']
+  end
+
+  def prompt
+    [
+      @state['instruction'],
+      @state['examples'].map do |example|
+        "Example Conversation:\n" + example['conversation']
+        # TODO use the label key they provide in the yml file
+      end,
+      "Conversation:\n" + @state['conversation']
+    ].join(SEPARATOR)
+  end
+
+  def add(speaker, message)
+    comms = "#{speaker}: #{message.strip}"
+    @state['conversation'] += comms + "\n"
+  end
+
+  def get_reply(prefix: nil)
+    @state['conversation'] += prefix if prefix
+
+    complete_result = complete(prompt, stop: "\n")
+    completion = complete_result['choices'][0]['text']
+
+    @state['conversation'] += completion + "\n"
+
+    completion
+  end
+end

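The Conversation behavior isn't covered in the README yet. Below is a minimal usage sketch based only on the class above; the token lookup, instruction, example transcript, and speaker labels are illustrative, not part of the gem:

```ruby
require 'activeai'

llm = ActiveAI::NeuralNetwork::GPT3.new(ENV['OPEN_AI_TOKEN'])

# state is a plain hash; the behavior creates the running 'conversation' entry itself
chat = ActiveAI::Behavior::LLM::Conversation.new(llm, {
  'instruction' => 'You are a friendly banking assistant. Answer as "Bot".',
  'examples'    => [
    { 'conversation' => "User: Hi there\nBot: Hello! How can I help you today?" }
  ]
})

chat.add('User', 'What can you help me with?')
reply = chat.get_reply(prefix: 'Bot:') # completes after "Bot:" and appends the reply to the history
puts reply
puts chat.history                      # the accumulated transcript so far
```
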
data/lib/activeai/behavior/llm/follow_structured_examples.rb
ADDED
@@ -0,0 +1,34 @@
+class ActiveAI::Behavior::LLM::FollowStructuredExamples < ActiveAI::Behavior::LLM
+  # state is an instruction, and a list of examples with key/value pairs
+  # would be nice to do casting, but not now i dont think..
+
+  def initialize(llm, state)
+    super(llm)
+    @state = state
+    # TODO raise errors if not expected thingies available in the config
+  end
+
+  def base_prompt
+    [
+      @state['instruction'],
+      @state['examples'].map do |example|
+        example.map do |key, value|
+          "#{key}: #{value}"
+        end.join("\n")
+      end.join(SEPARATOR)
+    ].join(SEPARATOR)
+  end
+
+  def call(input={}, extract: []) # TODO cool splat stuff?
+    prompt = base_prompt + SEPARATOR
+
+    prompt += input.map do |key, value|
+      "#{key}: #{value}"
+    end.join("\n")
+
+    complete_result = complete(prompt)
+    completion = complete_result['choices'][0]['text']
+
+    return extract_keys(completion, extract)
+  end
+end

data/lib/activeai/behavior/llm.rb
ADDED
@@ -0,0 +1,29 @@
+class ActiveAI::Behavior::LLM < ActiveAI::Behavior::Base
+  def initialize(llm)
+    @llm = llm
+  end
+
+  def complete(prompt, stop: nil)
+    @llm.complete(prompt: prompt, stop: stop)
+  end
+
+  SEPARATOR = "\n\n###\n\n"
+
+  def extract_keys(completion, extract)
+    matcher_string = extract.map{ |key| "#{key}:(.*)" }.join
+    matches = completion.match(/#{matcher_string}/m)
+
+    if matches
+      matches[1..-1].map.with_index do |value, index|
+        # TODO this seems hacky, gotta be a better way to extract?
+        [extract[index], value.strip]
+      end.to_h
+    else
+      nil
+    end
+  end
+end
+
+require_relative "llm/conversation"
+require_relative "llm/unstructured"
+require_relative "llm/follow_structured_examples"

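For reference, a quick sketch of what `extract_keys` does with a typical completion; the completion text and keys below are made up for illustration:

```ruby
completion = "\nRoute: bank#transfer_money\nParams: { \"beneficiaryId\": 12345, \"amount\": 100.0 }"
extract    = %w[Route Params]

matcher = extract.map { |key| "#{key}:(.*)" }.join   # => "Route:(.*)Params:(.*)"
matches = completion.match(/#{matcher}/m)            # each group captures the text between one label and the next
result  = matches[1..-1].map.with_index { |value, i| [extract[i], value.strip] }.to_h
# result => { "Route"  => "bank#transfer_money",
#             "Params" => "{ \"beneficiaryId\": 12345, \"amount\": 100.0 }" }
```
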
data/lib/activeai/behavior.rb
ADDED
@@ -0,0 +1,9 @@
+class ActiveAI::Behavior
+  def self.from_config(config)
+    # TODO detect and load the right pattern
+    # right now it's just "structuredexamples"
+    # this is just syntactic sugar, it's fine for now until you _need_ it
+  end
+end
+
+require_relative "behavior/base"

data/lib/activeai/controller.rb
ADDED
@@ -0,0 +1,46 @@
+class ActiveAI::Controller
+
+  class_attribute :routing_behavior
+
+  def self.auto_load_routing
+    routes_path = Rails.root.join('config', 'routes', self.to_s.underscore.gsub('_controller', '.yml'))
+    routes_config = YAML::load(File.read(routes_path))
+    self.load_routing(routes_config)
+  end
+
+  def self.load_routing(routes_config)
+    @llm = ActiveAI::NeuralNetwork::GPT3.new(ActiveAI.config[:gpt3_token], model: 'text-curie-001', temperature: 0.2)
+    self.routing_behavior = ActiveAI::Behavior::LLM::FollowStructuredExamples.new(@llm, routes_config)
+  end
+
+  attr_accessor :params
+
+  def prepare_action(request)
+    routing = self.class.routing_behavior.call({ 'Request' => request }, extract: %W[Route Params])
+    controller_name, action_name = routing['Route'].split('#')
+    # TODO verify it's the right controller and the action name exists and it's not a reserved / internal thing
+    return {
+      action: action_name,
+      params: JSON.parse(routing['Params']) # TODO cast as JSON earlier? e.g. in config of the behavior?
+    }
+  end
+
+  def call(request)
+    mapped_request = prepare_action(request)
+
+    if mapped_request
+      return run_action(mapped_request[:action], mapped_request[:params])
+    else
+      return nil
+    end
+  end
+
+  def run_action(action_name, params)
+    @params = params
+    response = send(action_name)
+    # handle response somehow, or do we just dump JSON back?
+  end
+
+  # surely this is where the magic prep loading and unloading happens?
+  # i.e. the params deal with this
+end

data/lib/activeai/neural_network/gpt3.rb
ADDED
@@ -0,0 +1,48 @@
+require 'faraday'
+require 'faraday/net_http'
+require 'faraday/multipart'
+Faraday.default_adapter = :net_http
+
+class ActiveAI::NeuralNetwork::GPT3 < ActiveAI::NeuralNetwork
+
+  DEFAULTS = {
+    model: 'text-davinci-003',
+    temperature: 0.7,
+    max_tokens: 1000
+  }
+
+  def initialize(token, uuid: 'system', max_tokens: DEFAULTS[:max_tokens], temperature: DEFAULTS[:temperature], model: DEFAULTS[:model])
+    @token = token
+    @uuid = uuid
+    @max_tokens = max_tokens
+    @temperature = temperature
+    @model = model
+  end
+
+  def json_connection
+    @json_connection ||= Faraday.new(
+      url: 'https://api.openai.com',
+      headers: { 'Authorization' => "Bearer #{@token}" }
+    ) do |f|
+      f.request :json
+      f.response :json
+    end
+  end
+
+  def post(path, params={})
+    response = json_connection.post(path, params.merge({ user: @uuid }))
+    response.body
+  end
+
+  def complete(prompt:, stop: nil, suffix: nil) # TODO move the other stuff besides prompt out?
+    post("v1/completions", {
+      model: @model,
+      prompt: prompt,
+      suffix: suffix, # NOTE: doesn't work for fine-tuned models
+      stop: stop,
+      max_tokens: @max_tokens,
+      temperature: @temperature,
+      user: @uuid
+    })
+  end
+end

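A small usage sketch (values illustrative) of the keyword arguments `complete` accepts, including the stop sequence that the Conversation behavior relies on:

```ruby
gpt3 = ActiveAI::NeuralNetwork::GPT3.new(ENV['OPEN_AI_TOKEN'], model: 'text-curie-001', temperature: 0.2)

# stop: "\n" ends the completion at the first newline, keeping chat-style replies to a single line
response = gpt3.complete(prompt: "User: Hello!\nBot:", stop: "\n")
puts response['choices'].first['text']
```
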
data/lib/activeai/router.rb
ADDED
@@ -0,0 +1,63 @@
+class ActiveAI::Router
+  INSTRUCTION = 'For a given Match request, choose where to send it via the "Route" field. If nothing matches, the "Route" field should be None.'
+  UNMATCHED = { 'Match' => 'Create a NASA space program', 'Route' => 'None' }
+
+  def initialize
+    @routings = []
+    @llm = ActiveAI::NeuralNetwork::GPT3.new(ActiveAI.config[:gpt3_token], model: 'text-curie-001', temperature: 0.2)
+  end
+
+  def add_controller_routing(routing)
+    @routings << routing
+  end
+
+  def add_controller_routing_from_path(path)
+    routing = YAML::load(File.read(path))
+    add_controller_routing(routing)
+  end
+
+  def auto_load_routing(folder)
+    paths = Dir[folder.join("**", "*.yml")]
+    paths.each do |path|
+      add_controller_routing_from_path(path)
+    end
+  end
+
+  def behavior
+    config = {
+      'instruction' => INSTRUCTION,
+      'examples' => [UNMATCHED] + @routings.map do |routing|
+        routing['examples'].reject do |example|
+          example['Route'] == 'None'
+        end.map do |example|
+          example.slice('Match', 'Route')
+        end
+      end.flatten
+    }
+
+    ActiveAI::Behavior::LLM::FollowStructuredExamples.new(@llm, config)
+  end
+
+  def find_controller(request)
+    # should return constantized maybe?
+    routing = behavior.call({ 'Request' => request }, extract: %W[Route])
+    controller_name, action_name = routing['Route'].split('#')
+
+    if controller_name == "None" || action_name.blank?
+      return nil
+    else
+      return (controller_name + "_controller").classify.constantize
+      # TODO need protection (somewhere) from using controllers that aren't allowed
+      # maybe router has a whitelist? since we're taking user input
+      # idk problem for later not now
+    end
+  end
+
+  def call(request)
+    if controller = find_controller(request)
+      controller.new.call(request)
+    else
+      return nil
+    end
+  end
+end

data/lib/activeai/version.rb
CHANGED
data/lib/activeai.rb
CHANGED
@@ -1,8 +1,20 @@
 # frozen_string_literal: true
 
+require_relative "activeai/behavior"
+require_relative "activeai/configuration"
+require_relative "activeai/controller"
+require_relative "activeai/neural_network"
+require_relative "activeai/router"
 require_relative "activeai/version"
 
 module ActiveAI
   class Error < StandardError; end
-
+
+  def self.config
+    {
+      gpt3_token: ENV['OPEN_AI_TOKEN']
+    }
+  end
+
 end
+

metadata
CHANGED
@@ -1,15 +1,43 @@
 --- !ruby/object:Gem::Specification
 name: activeai
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.2
 platform: ruby
 authors:
 - jeriko
 autorequire:
 bindir: exe
 cert_chain: []
-date:
-dependencies:
+date: 2023-01-06 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: faraday
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: 'true'
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: A pack for building AI-backed routes and controllers, plus a collection
   of helpers for GPT3, DALLE, Whisper, Stable Diffusion and more
 email:

@@ -23,11 +51,24 @@ files:
 - CHANGELOG.md
 - CODE_OF_CONDUCT.md
 - Gemfile
+- Gemfile.lock
 - LICENSE
 - README.md
 - Rakefile
+- SESSIONS.md
 - activeai.gemspec
 - lib/activeai.rb
+- lib/activeai/behavior.rb
+- lib/activeai/behavior/base.rb
+- lib/activeai/behavior/llm.rb
+- lib/activeai/behavior/llm/conversation.rb
+- lib/activeai/behavior/llm/follow_structured_examples.rb
+- lib/activeai/behavior/llm/unstructured.rb
+- lib/activeai/configuration.rb
+- lib/activeai/controller.rb
+- lib/activeai/neural_network.rb
+- lib/activeai/neural_network/gpt3.rb
+- lib/activeai/router.rb
 - lib/activeai/version.rb
 - sig/activeai.rbs
 homepage: https://github.com/gamebreakers-org/activeai