intelli_agent 0.0.1 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/{ai.rb → intelli_agent/ai.rb} +8 -8
- data/lib/{ai/agent.rb → intelli_agent.rb} +10 -17
- metadata +4 -7
- data/lib/ai/agent.rb:Zone.Identifier +0 -0
- data/lib/ai/agent_test.rb_:Zone.Identifier +0 -0
- data/lib/ai.rb:Zone.Identifier +0 -0
- data/lib/{ai → intelli_agent}/agent_test.rb_ +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 729fa0b1b8a80ceaeb2568429fa7cbbe167f63d865d6beb1283daa62eaf4ca21
+  data.tar.gz: 505c4a6e21b9e4520b21eca38aa19cac782bbe4701acb02539f6e276baf3e04d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c395e1406aff8919b518122b75c28f666d1e7ecfcebb21951f04b92824d4edc165d25e4a8eb24cfccc1bf1f3c2934651dcd27dd33aac09a426f94394c0ea26f3
+  data.tar.gz: 5d0ac8a6696e167ea964c5fe8090b9d318b6742dbe15f88117f094e74332e8f472dad2ff2aeb6d47fa204de5b5e5f1cb9534cfa30aaa67eb7b6f285eed2e74c8
data/lib/{ai.rb → intelli_agent/ai.rb}
RENAMED
@@ -1,14 +1,14 @@
 # In the future, this became a bus to more than one AI provider
 module AI
-  BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL')
-  ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL')
+  BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
+  ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
 
-  def
+  def embed(input, model: 'text-embedding-3-large')
     response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
     response.dig('data', 0, 'embedding')
   end
 
-  def
+  def single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
     parameters = { model:, messages: [{ role: 'user', content: prompt }] }
 
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
@@ -17,7 +17,7 @@ module AI
     response.dig('choices', 0, 'message', 'content').strip
   end
 
-  def
+  def vision(prompt:, image_url:, response_format: nil)
     messages = [{ type: :text, text: prompt },
                 { type: :image_url, image_url: { url: image_url } }]
 
@@ -29,7 +29,7 @@ module AI
     response.dig('choices', 0, 'message', 'content').strip
   end
 
-  def
+  def single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
     parameters = { model:,
                    messages: [
                      { role: 'system', content: system },
@@ -42,7 +42,7 @@ module AI
     response.dig('choices', 0, 'message', 'content').strip
   end
 
-  def
+  def chat(messages, model: AI::BASIC_MODEL, response_format: nil)
     parameters = { model:, messages: }
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
 
@@ -50,7 +50,7 @@ module AI
     response.dig('choices', 0, 'message', 'content').strip
   end
 
-  def
+  def models
     OpenAI::Client.new.models.list
   end
 end
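For orientation, a minimal sketch of how the renamed helper module might be called after this change. It assumes the ruby-openai gem is installed and picks up the OPENAI_API_KEY environment variable; the prompt and input strings are illustrative only and are not part of the gem.

require 'openai'
require 'intelli_agent/ai'

# AI's helpers are plain instance methods, so they can be mixed in here;
# IntelliAgent itself uses `extend AI` instead (see the next file).
include AI

# Single-turn completion against the new default BASIC_MODEL ('gpt-4o-mini').
answer = single_prompt(prompt: 'Say hello in one word.')

# System + user chat, requesting a JSON object back.
reply = single_chat(system: 'Answer strictly in JSON.',
                    user: 'Return {"greeting": "..."} in Portuguese.',
                    response_format: :json)

# Embedding vector using the 'text-embedding-3-large' default.
vector = embed('intelli_agent gem')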
data/lib/{ai/agent.rb → intelli_agent.rb}
RENAMED
@@ -1,10 +1,14 @@
-
+require 'openai'
+require 'intelli_agent/ai'
+
+class IntelliAgent
+  extend AI
   attr_reader :assistant, :thread, :instructions, :vector_store_id
 
   def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
     @openai_client = OpenAI::Client.new
 
-    assistant_id ||= ENV.fetch('
+    assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
     @assistant = @openai_client.assistants.retrieve(id: assistant_id)
 
     thread_params = {}
@@ -19,17 +23,10 @@ class AI::Agent
     @instructions = thread_instructions || @assistant['instructions']
   end
 
-  def add_message(text, role: 'user')
-
-
-
-  def messages
-    @openai_client.messages.list(thread_id: @thread['id'])
-  end
-
-  def last_message
-    messages['data'].first['content'].first['text']['value']
-  end
+  def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
+  def messages = @openai_client.messages.list(thread_id: @thread['id'])
+  def last_message = messages['data'].first['content'].first['text']['value']
+  def runs = @openai_client.runs.list(thread_id: @thread['id'])
 
   def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
     params = { assistant_id: @assistant['id'] }
@@ -64,8 +61,4 @@ class AI::Agent
       end
     end
   end
-
-  def runs
-    @openai_client.runs.list(thread_id: @thread['id'])
-  end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: intelli_agent
 version: !ruby/object:Gem::Version
-  version: 0.0.1
+  version: 0.0.3
 platform: ruby
 authors:
 - Gedean Dias
@@ -31,12 +31,9 @@ extensions: []
 extra_rdoc_files: []
 files:
 - README.md
-- lib/
-- lib/
-- lib/ai
-- lib/ai/agent.rb:Zone.Identifier
-- lib/ai/agent_test.rb_
-- lib/ai/agent_test.rb_:Zone.Identifier
+- lib/intelli_agent.rb
+- lib/intelli_agent/agent_test.rb_
+- lib/intelli_agent/ai.rb
 homepage: https://github.com/gedean/intelli_agent
 licenses:
 - MIT
data/lib/ai/agent.rb:Zone.Identifier
DELETED
File without changes

data/lib/ai/agent_test.rb_:Zone.Identifier
DELETED
File without changes

data/lib/ai.rb:Zone.Identifier
DELETED
File without changes

data/lib/{ai → intelli_agent}/agent_test.rb_
RENAMED
File without changes