intelli_agent 0.0.1 → 0.0.3

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 3fca7e6fcb2e2360ccd76c9d09506f73db72dd525dfd771322a19a6982eedd52
4
- data.tar.gz: 2bcb9b1cc62b6dfb9a6c4ecc634dab04e9c1c27a2941dd2feb6a0f8aad858934
3
+ metadata.gz: 729fa0b1b8a80ceaeb2568429fa7cbbe167f63d865d6beb1283daa62eaf4ca21
4
+ data.tar.gz: 505c4a6e21b9e4520b21eca38aa19cac782bbe4701acb02539f6e276baf3e04d
5
5
  SHA512:
6
- metadata.gz: 3de9eaddade5b5d5f16e385ef4cbff9bd7e7eade087b4bbc04c5c8ae2c3d0cc15ef79b518536d023f3153b62a4b748e53f735b7f33a338450db403d824c86f06
7
- data.tar.gz: aa27675fcfc8c0bcab9701b1f0e8ca6ec09eb03f8c4fd141b9c0b63465c196f9bfcf85ad8c401eeb2c32e82930f310c3ae0fbac445f43f1e5d4dd6aa6c1622e7
6
+ metadata.gz: c395e1406aff8919b518122b75c28f666d1e7ecfcebb21951f04b92824d4edc165d25e4a8eb24cfccc1bf1f3c2934651dcd27dd33aac09a426f94394c0ea26f3
7
+ data.tar.gz: 5d0ac8a6696e167ea964c5fe8090b9d318b6742dbe15f88117f094e74332e8f472dad2ff2aeb6d47fa204de5b5e5f1cb9534cfa30aaa67eb7b6f285eed2e74c8
@@ -1,14 +1,14 @@
1
1
  # In the future, this may become a bus to more than one AI provider
2
2
  module AI
3
- BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL')
4
- ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL')
3
+ BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
4
+ ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
5
5
 
6
- def self.embed(input, model: 'text-embedding-3-large')
6
+ def embed(input, model: 'text-embedding-3-large')
7
7
  response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
8
8
  response.dig('data', 0, 'embedding')
9
9
  end
10
10
 
11
- def self.single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
11
+ def single_prompt(prompt:, model: AI::BASIC_MODEL, response_format: nil)
12
12
  parameters = { model:, messages: [{ role: 'user', content: prompt }] }
13
13
 
14
14
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
@@ -17,7 +17,7 @@ module AI
17
17
  response.dig('choices', 0, 'message', 'content').strip
18
18
  end
19
19
 
20
- def self.vision(prompt:, image_url:, response_format: nil)
20
+ def vision(prompt:, image_url:, response_format: nil)
21
21
  messages = [{ type: :text, text: prompt },
22
22
  { type: :image_url, image_url: { url: image_url } }]
23
23
 
@@ -29,7 +29,7 @@ module AI
29
29
  response.dig('choices', 0, 'message', 'content').strip
30
30
  end
31
31
 
32
- def self.single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
32
+ def single_chat(system:, user:, model: AI::BASIC_MODEL, response_format: nil)
33
33
  parameters = { model:,
34
34
  messages: [
35
35
  { role: 'system', content: system },
@@ -42,7 +42,7 @@ module AI
42
42
  response.dig('choices', 0, 'message', 'content').strip
43
43
  end
44
44
 
45
- def self.chat(messages, model: AI::BASIC_MODEL, response_format: nil)
45
+ def chat(messages, model: AI::BASIC_MODEL, response_format: nil)
46
46
  parameters = { model:, messages: }
47
47
  parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
48
48
 
@@ -50,7 +50,7 @@ module AI
50
50
  response.dig('choices', 0, 'message', 'content').strip
51
51
  end
52
52
 
53
- def self.models
53
+ def models
54
54
  OpenAI::Client.new.models.list
55
55
  end
56
56
  end
@@ -1,10 +1,14 @@
1
- class AI::Agent
1
+ require 'openai'
2
+ require 'intelli_agent/ai'
3
+
4
+ class IntelliAgent
5
+ extend AI
2
6
  attr_reader :assistant, :thread, :instructions, :vector_store_id
3
7
 
4
8
  def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
5
9
  @openai_client = OpenAI::Client.new
6
10
 
7
- assistant_id ||= ENV.fetch('OPENAI_AKAPU_CORE_ASSISTANT_ID')
11
+ assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
8
12
  @assistant = @openai_client.assistants.retrieve(id: assistant_id)
9
13
 
10
14
  thread_params = {}
@@ -19,17 +23,10 @@ class AI::Agent
19
23
  @instructions = thread_instructions || @assistant['instructions']
20
24
  end
21
25
 
22
- def add_message(text, role: 'user')
23
- @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
24
- end
25
-
26
- def messages
27
- @openai_client.messages.list(thread_id: @thread['id'])
28
- end
29
-
30
- def last_message
31
- messages['data'].first['content'].first['text']['value']
32
- end
26
+ def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
27
+ def messages = @openai_client.messages.list(thread_id: @thread['id'])
28
+ def last_message = messages['data'].first['content'].first['text']['value']
29
+ def runs = @openai_client.runs.list(thread_id: @thread['id'])
33
30
 
34
31
  def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
35
32
  params = { assistant_id: @assistant['id'] }
@@ -64,8 +61,4 @@ class AI::Agent
64
61
  end
65
62
  end
66
63
  end
67
-
68
- def runs
69
- @openai_client.runs.list(thread_id: @thread['id'])
70
- end
71
64
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: intelli_agent
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.0.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
@@ -31,12 +31,9 @@ extensions: []
31
31
  extra_rdoc_files: []
32
32
  files:
33
33
  - README.md
34
- - lib/ai.rb
35
- - lib/ai.rb:Zone.Identifier
36
- - lib/ai/agent.rb
37
- - lib/ai/agent.rb:Zone.Identifier
38
- - lib/ai/agent_test.rb_
39
- - lib/ai/agent_test.rb_:Zone.Identifier
34
+ - lib/intelli_agent.rb
35
+ - lib/intelli_agent/agent_test.rb_
36
+ - lib/intelli_agent/ai.rb
40
37
  homepage: https://github.com/gedean/intelli_agent
41
38
  licenses:
42
39
  - MIT
File without changes
File without changes
File without changes
File without changes