omniai-anthropic 1.3.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/lib/omniai/anthropic/chat/response/completion.rb +29 -0
- data/lib/omniai/anthropic/chat/response/stream.rb +111 -0
- data/lib/omniai/anthropic/chat.rb +18 -18
- data/lib/omniai/anthropic/client.rb +3 -2
- data/lib/omniai/anthropic/version.rb +1 -1
- metadata +5 -5
- data/lib/omniai/anthropic/chat/completion.rb +0 -24
- data/lib/omniai/anthropic/chat/stream.rb +0 -106
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 2b6103ed1e3c87ab2d1f2767dc4dce991756eefb558886ab8436632083058aba
|
4
|
+
data.tar.gz: aa75e2707afdb2bdffb7f74716e1afa31c027141dbfd114c45a77c00abe52045
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: b06128d930cd70b61d2cea0ee2bb0318342d9114f7c9fe46ddaf8a1778f3570cb6e49748efd3a626d95daac9df1ecaf5680bb05180c500a4933e95fd682c942e
|
7
|
+
data.tar.gz: a18cce1b18ca45810dc9677856ef96e8af5d5faa71bf6160a981a5584d1f76157f45679481fc69e946d2837681cb9a40d8860fa6f52e879206c0af44612f5095
|
data/Gemfile
CHANGED
@@ -0,0 +1,29 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module Anthropic
|
5
|
+
class Chat
|
6
|
+
module Response
|
7
|
+
# A completion returned by the API.
|
8
|
+
class Completion < OmniAI::Chat::Response::Completion
|
9
|
+
# @return [Array<OmniAI::Chat::Response::MessageChoice>]
|
10
|
+
def choices
|
11
|
+
@choices ||= begin
|
12
|
+
role = @data['role']
|
13
|
+
|
14
|
+
@data['content'].map do |data, index|
|
15
|
+
OmniAI::Chat::Response::MessageChoice.new(data: {
|
16
|
+
'index' => index,
|
17
|
+
'message' => {
|
18
|
+
'role' => role,
|
19
|
+
'content' => data['text'],
|
20
|
+
},
|
21
|
+
})
|
22
|
+
end
|
23
|
+
end
|
24
|
+
end
|
25
|
+
end
|
26
|
+
end
|
27
|
+
end
|
28
|
+
end
|
29
|
+
end
|
@@ -0,0 +1,111 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module OmniAI
|
4
|
+
module Anthropic
|
5
|
+
class Chat
|
6
|
+
module Response
|
7
|
+
# A stream given when streaming.
|
8
|
+
class Stream < OmniAI::Chat::Response::Stream
|
9
|
+
module Type
|
10
|
+
PING = 'ping'
|
11
|
+
MESSAGE_START = 'message_start'
|
12
|
+
MESSAGE_STOP = 'message_stop'
|
13
|
+
MESSAGE_DELTA = 'message_delta'
|
14
|
+
CONTENT_BLOCK_START = 'content_block_start'
|
15
|
+
CONTENT_BLOCK_STOP = 'content_block_stop'
|
16
|
+
CONTENT_BLOCK_DELTA = 'content_block_delta'
|
17
|
+
end
|
18
|
+
|
19
|
+
# Process the stream into chunks by event.
|
20
|
+
class Builder
|
21
|
+
attr_reader :id, :model, :role, :content, :index
|
22
|
+
|
23
|
+
# @return [OmniAI::Chat::Chunk]
|
24
|
+
def chunk
|
25
|
+
OmniAI::Chat::Response::Chunk.new(data: {
|
26
|
+
'id' => @id,
|
27
|
+
'model' => @model,
|
28
|
+
'choices' => [{
|
29
|
+
'index' => @index,
|
30
|
+
'delta' => {
|
31
|
+
'role' => @role,
|
32
|
+
'content' => @content,
|
33
|
+
},
|
34
|
+
}],
|
35
|
+
})
|
36
|
+
end
|
37
|
+
|
38
|
+
# Handler for Type::MESSAGE_START
|
39
|
+
#
|
40
|
+
# @param data [Hash]
|
41
|
+
def message_start(data)
|
42
|
+
@id = data['id']
|
43
|
+
@model = data['model']
|
44
|
+
@role = data['role']
|
45
|
+
end
|
46
|
+
|
47
|
+
# Handler for Type::MESSAGE_STOP
|
48
|
+
#
|
49
|
+
# @param data [Hash]
|
50
|
+
def message_stop(_)
|
51
|
+
@id = nil
|
52
|
+
@model = nil
|
53
|
+
@role = nil
|
54
|
+
end
|
55
|
+
|
56
|
+
# Handler for Type::CONTENT_BLOCK_START
|
57
|
+
#
|
58
|
+
# @param data [Hash]
|
59
|
+
def content_block_start(data)
|
60
|
+
@index = data['index']
|
61
|
+
end
|
62
|
+
|
63
|
+
# Handler for Type::CONTENT_BLOCK_STOP
|
64
|
+
#
|
65
|
+
# @param data [Hash]
|
66
|
+
def content_block_stop(_)
|
67
|
+
@index = nil
|
68
|
+
end
|
69
|
+
|
70
|
+
# Handler for Type::CONTENT_BLOCK_DELTA
|
71
|
+
#
|
72
|
+
# @param data [Hash]
|
73
|
+
def content_block_delta(data)
|
74
|
+
return unless data['delta']['type'].eql?('text_delta')
|
75
|
+
|
76
|
+
@content = data['delta']['text']
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
80
|
+
# @yield [OmniAI::Chat::Chunk]
|
81
|
+
def stream!(&block)
|
82
|
+
builder = Builder.new
|
83
|
+
|
84
|
+
@response.body.each do |chunk|
|
85
|
+
@parser.feed(chunk) do |type, data|
|
86
|
+
process(type:, data: JSON.parse(data), builder:, &block)
|
87
|
+
end
|
88
|
+
end
|
89
|
+
end
|
90
|
+
|
91
|
+
private
|
92
|
+
|
93
|
+
# @param type [String]
|
94
|
+
# @param data [Hash]
|
95
|
+
# @param builder [Builder]
|
96
|
+
def process(type:, data:, builder:, &)
|
97
|
+
case type
|
98
|
+
when Type::MESSAGE_START then builder.message_start(data)
|
99
|
+
when Type::CONTENT_BLOCK_START then builder.content_block_start(data)
|
100
|
+
when Type::CONTENT_BLOCK_STOP then builder.content_block_stop(data)
|
101
|
+
when Type::MESSAGE_STOP then builder.message_stop(data)
|
102
|
+
when Type::CONTENT_BLOCK_DELTA
|
103
|
+
builder.content_block_delta(data)
|
104
|
+
yield(builder.chunk)
|
105
|
+
end
|
106
|
+
end
|
107
|
+
end
|
108
|
+
end
|
109
|
+
end
|
110
|
+
end
|
111
|
+
end
|
@@ -27,42 +27,42 @@ module OmniAI
|
|
27
27
|
|
28
28
|
protected
|
29
29
|
|
30
|
-
# @param response [HTTP::Response]
|
31
|
-
# @return [OmniAI::Anthropic::Chat::Stream]
|
32
|
-
def stream!(response:)
|
33
|
-
raise Error, "#{self.class.name}#stream! unstreamable" unless @stream
|
34
|
-
|
35
|
-
Stream.new(response:).stream! { |chunk| @stream.call(chunk) }
|
36
|
-
end
|
37
|
-
|
38
|
-
# @param response [HTTP::Response]
|
39
|
-
# @param response [OmniAI::Anthropic::Chat::Completion]
|
40
|
-
def complete!(response:)
|
41
|
-
Completion.new(data: response.parse)
|
42
|
-
end
|
43
|
-
|
44
30
|
# @return [Hash]
|
45
31
|
def payload
|
46
32
|
OmniAI::Anthropic.config.chat_options.merge({
|
47
33
|
model: @model,
|
48
|
-
messages: messages.filter { |message| !message[:role].eql?(
|
34
|
+
messages: messages.filter { |message| !message[:role].eql?(Role::SYSTEM) },
|
49
35
|
system:,
|
50
36
|
stream: @stream.nil? ? nil : !@stream.nil?,
|
51
37
|
temperature: @temperature,
|
38
|
+
tools: tools_payload,
|
52
39
|
}).compact
|
53
40
|
end
|
54
41
|
|
55
42
|
# @return [String, nil]
|
56
43
|
def system
|
57
|
-
messages = self.messages.filter { |message| message[:role].eql?(
|
58
|
-
messages << { role:
|
44
|
+
messages = self.messages.filter { |message| message[:role].eql?(Role::SYSTEM) }
|
45
|
+
messages << { role: Role::SYSTEM, content: JSON_PROMPT } if @format.eql?(:json)
|
59
46
|
|
60
47
|
messages.map { |message| message[:content] }.join("\n\n") if messages.any?
|
61
48
|
end
|
62
49
|
|
63
50
|
# @return [String]
|
64
51
|
def path
|
65
|
-
"/#{
|
52
|
+
"/#{Client::VERSION}/messages"
|
53
|
+
end
|
54
|
+
|
55
|
+
private
|
56
|
+
|
57
|
+
# @return [Array<Hash>, nil]
|
58
|
+
def tools_payload
|
59
|
+
@tools&.map do |tool|
|
60
|
+
{
|
61
|
+
name: tool.name,
|
62
|
+
description: tool.description,
|
63
|
+
input_schema: tool.parameters&.prepare,
|
64
|
+
}.compact
|
65
|
+
end
|
66
66
|
end
|
67
67
|
end
|
68
68
|
end
|
@@ -57,10 +57,11 @@ module OmniAI
|
|
57
57
|
# @param format [Symbol] optional :text or :json
|
58
58
|
# @param temperature [Float, nil] optional
|
59
59
|
# @param stream [Proc, nil] optional
|
60
|
+
# @param tools [Array<OmniAI::Tool>, nil] optional
|
60
61
|
#
|
61
62
|
# @return [OmniAI::Chat::Completion]
|
62
|
-
def chat(messages, model: Chat::Model::CLAUDE_HAIKU, temperature: nil, format: nil, stream: nil)
|
63
|
-
Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
|
63
|
+
def chat(messages, model: Chat::Model::CLAUDE_HAIKU, temperature: nil, format: nil, stream: nil, tools: nil)
|
64
|
+
Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self)
|
64
65
|
end
|
65
66
|
end
|
66
67
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: omniai-anthropic
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.3.0
|
4
|
+
version: 1.5.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Kevin Sylvestre
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2024-
|
11
|
+
date: 2024-07-12 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: event_stream_parser
|
@@ -63,8 +63,8 @@ files:
|
|
63
63
|
- README.md
|
64
64
|
- lib/omniai/anthropic.rb
|
65
65
|
- lib/omniai/anthropic/chat.rb
|
66
|
-
- lib/omniai/anthropic/chat/completion.rb
|
67
|
-
- lib/omniai/anthropic/chat/stream.rb
|
66
|
+
- lib/omniai/anthropic/chat/response/completion.rb
|
67
|
+
- lib/omniai/anthropic/chat/response/stream.rb
|
68
68
|
- lib/omniai/anthropic/client.rb
|
69
69
|
- lib/omniai/anthropic/config.rb
|
70
70
|
- lib/omniai/anthropic/version.rb
|
@@ -90,7 +90,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
90
90
|
- !ruby/object:Gem::Version
|
91
91
|
version: '0'
|
92
92
|
requirements: []
|
93
|
-
rubygems_version: 3.5.
|
93
|
+
rubygems_version: 3.5.14
|
94
94
|
signing_key:
|
95
95
|
specification_version: 4
|
96
96
|
summary: A generalized framework for interacting with Anthropic
|
@@ -1,24 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module OmniAI
|
4
|
-
module Anthropic
|
5
|
-
class Chat
|
6
|
-
# A completion returned by the API.
|
7
|
-
class Completion < OmniAI::Chat::Completion
|
8
|
-
# @return [Array<OmniAI::Chat::MessageChoice>]
|
9
|
-
def choices
|
10
|
-
@choices ||= begin
|
11
|
-
role = @data['role']
|
12
|
-
|
13
|
-
@data['content'].map do |data, index|
|
14
|
-
OmniAI::Chat::MessageChoice.for(data: {
|
15
|
-
'index' => index,
|
16
|
-
'message' => { 'role' => role, 'content' => data['text'] },
|
17
|
-
})
|
18
|
-
end
|
19
|
-
end
|
20
|
-
end
|
21
|
-
end
|
22
|
-
end
|
23
|
-
end
|
24
|
-
end
|
@@ -1,106 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module OmniAI
|
4
|
-
module Anthropic
|
5
|
-
class Chat
|
6
|
-
# A stream given when streaming.
|
7
|
-
class Stream < OmniAI::Chat::Stream
|
8
|
-
module Type
|
9
|
-
PING = 'ping'
|
10
|
-
MESSAGE_START = 'message_start'
|
11
|
-
MESSAGE_STOP = 'message_stop'
|
12
|
-
MESSAGE_DELTA = 'message_delta'
|
13
|
-
CONTENT_BLOCK_START = 'content_block_start'
|
14
|
-
CONTENT_BLOCK_STOP = 'content_block_stop'
|
15
|
-
CONTENT_BLOCK_DELTA = 'content_block_delta'
|
16
|
-
end
|
17
|
-
|
18
|
-
# Process the stream into chunks by event.
|
19
|
-
class Builder
|
20
|
-
attr_reader :id, :model, :role, :content, :index
|
21
|
-
|
22
|
-
# @return [OmniAI::Chat::Chunk]
|
23
|
-
def chunk
|
24
|
-
OmniAI::Chat::Chunk.new(data: {
|
25
|
-
'id' => @id,
|
26
|
-
'model' => @model,
|
27
|
-
'choices' => [{
|
28
|
-
'index' => @index,
|
29
|
-
'delta' => { 'role' => @role, 'content' => @content },
|
30
|
-
}],
|
31
|
-
})
|
32
|
-
end
|
33
|
-
|
34
|
-
# Handler for Type::MESSAGE_START
|
35
|
-
#
|
36
|
-
# @param data [Hash]
|
37
|
-
def message_start(data)
|
38
|
-
@id = data['id']
|
39
|
-
@model = data['model']
|
40
|
-
@role = data['role']
|
41
|
-
end
|
42
|
-
|
43
|
-
# Handler for Type::MESSAGE_STOP
|
44
|
-
#
|
45
|
-
# @param data [Hash]
|
46
|
-
def message_stop(_)
|
47
|
-
@id = nil
|
48
|
-
@model = nil
|
49
|
-
@role = nil
|
50
|
-
end
|
51
|
-
|
52
|
-
# Handler for Type::CONTENT_BLOCK_START
|
53
|
-
#
|
54
|
-
# @param data [Hash]
|
55
|
-
def content_block_start(data)
|
56
|
-
@index = data['index']
|
57
|
-
end
|
58
|
-
|
59
|
-
# Handler for Type::CONTENT_BLOCK_STOP
|
60
|
-
#
|
61
|
-
# @param data [Hash]
|
62
|
-
def content_block_stop(_)
|
63
|
-
@index = nil
|
64
|
-
end
|
65
|
-
|
66
|
-
# Handler for Type::CONTENT_BLOCK_DELTA
|
67
|
-
#
|
68
|
-
# @param data [Hash]
|
69
|
-
def content_block_delta(data)
|
70
|
-
return unless data['delta']['type'].eql?('text_delta')
|
71
|
-
|
72
|
-
@content = data['delta']['text']
|
73
|
-
end
|
74
|
-
end
|
75
|
-
|
76
|
-
# @yield [OmniAI::Chat::Chunk]
|
77
|
-
def stream!(&block)
|
78
|
-
builder = Builder.new
|
79
|
-
|
80
|
-
@response.body.each do |chunk|
|
81
|
-
@parser.feed(chunk) do |type, data|
|
82
|
-
process(type:, data: JSON.parse(data), builder:, &block)
|
83
|
-
end
|
84
|
-
end
|
85
|
-
end
|
86
|
-
|
87
|
-
private
|
88
|
-
|
89
|
-
# @param type [String]
|
90
|
-
# @param data [Hash]
|
91
|
-
# @param builder [Builder]
|
92
|
-
def process(type:, data:, builder:, &)
|
93
|
-
case type
|
94
|
-
when Type::MESSAGE_START then builder.message_start(data)
|
95
|
-
when Type::CONTENT_BLOCK_START then builder.content_block_start(data)
|
96
|
-
when Type::CONTENT_BLOCK_STOP then builder.content_block_stop(data)
|
97
|
-
when Type::MESSAGE_STOP then builder.message_stop(data)
|
98
|
-
when Type::CONTENT_BLOCK_DELTA
|
99
|
-
builder.content_block_delta(data)
|
100
|
-
yield(builder.chunk)
|
101
|
-
end
|
102
|
-
end
|
103
|
-
end
|
104
|
-
end
|
105
|
-
end
|
106
|
-
end
|