omniai-anthropic 0.1.0 → 1.0.3
- checksums.yaml +4 -4
- data/Gemfile +14 -0
- data/README.md +96 -16
- data/lib/omniai/anthropic/chat/completion.rb +24 -0
- data/lib/omniai/anthropic/chat/stream.rb +106 -0
- data/lib/omniai/anthropic/chat.rb +68 -0
- data/lib/omniai/anthropic/client.rb +63 -0
- data/lib/omniai/anthropic/config.rb +18 -0
- data/lib/omniai/anthropic/version.rb +1 -1
- data/lib/omniai/anthropic.rb +17 -3
- metadata +22 -17
- data/.rspec +0 -3
- data/.rubocop.yml +0 -8
- data/Rakefile +0 -12
- data/sig/omniai/anthropic.rbs +0 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8eff2ca7ce3838279c86f54ebdce6872f28e6e8b023f6a9e67494458d2dfb74c
+  data.tar.gz: 770fa79aee0917b1271e05a925566c766b0203c21a4a8a3e2bc030fd0fb77509
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1ecb8c35b85e6a7c88a0f882441fc44a89d7abe3504c6afd6ba20b0555bd13a24c021eaab2482e06009ef61b8b6034b19148819d78ead0ae9501d99d63ba283f
+  data.tar.gz: d63988b5ba55395a47c80880f47508226ad587431babe47b885724a3d3db0edd56afcbc431d291548c510523be08561d02ce520afc1be8d9acb573aebf3f43a3
data/Gemfile
ADDED
data/README.md
CHANGED
@@ -1,31 +1,111 @@
-#
+# OmniAI::Anthropic
 
-
-
-Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/omniai/anthropic`. To experiment with that code, run `bin/console` for an interactive prompt.
+An Anthropic implementation of the [OmniAI](https://github.com/ksylvest/omniai) APIs.
 
 ## Installation
 
-
+```sh
+gem install omniai-anthropic
+```
 
-
+## Usage
 
-
+### Client
 
-
+A client is setup as follows if `ENV['ANTHROPIC_API_KEY']` exists:
 
-
+```ruby
+client = OmniAI::Anthropic::Client.new
+```
 
-
+A client may also be passed the following options:
+
+- `api_key` (required - default is `ENV['ANTHROPIC_API_KEY']`)
+- `host` (optional)
+
+### Configuration
+
+Global configuration is supported for the following options:
+
+```ruby
+OmniAI::Anthropic.configure do |config|
+  config.api_key = '...' # default: ENV['ANTHROPIC_API_KEY']
+  config.host = '...' # default: 'https://api.anthropic.com'
+end
+```
+
+### Chat
+
+A chat completion is generated by passing in prompts using any a variety of formats:
+
+```ruby
+completion = client.chat('Tell me a joke!')
+completion.choice.message.content # 'Why did the chicken cross the road? To get to the other side.'
+```
+
+```ruby
+completion = client.chat({
+  role: OmniAI::Chat::Role::USER,
+  content: 'Is it wise to jump off a bridge?'
+})
+completion.choice.message.content # 'No.'
+```
+
+```ruby
+completion = client.chat([
+  {
+    role: OmniAI::Chat::Role::SYSTEM,
+    content: 'You are a helpful assistant.'
+  },
+  'What is the capital of Canada?',
+])
+completion.choice.message.content # 'The capital of Canada is Ottawa.'
+```
+
+#### Model
+
+`model` takes an optional string (default is `claude-3-haiku-20240307`):
+
+```ruby
+completion = client.chat('Provide code for fibonacci', model: OmniAI::Anthropic::Chat::Model::OPUS)
+completion.choice.message.content # 'def fibonacci(n)...end'
+```
+
+[Anthropic API Reference `model`](https://docs.anthropic.com/en/api/messages)
+
+#### Temperature
+
+`temperature` takes an optional float between `0.0` and `1.0` (defaults is `0.7`):
+
+```ruby
+completion = client.chat('Pick a number between 1 and 5', temperature: 1.0)
+completion.choice.message.content # '3'
+```
+
+[Anthropic API Reference `temperature`](https://docs.anthropic.com/en/api/messages)
+
+#### Stream
+
+`stream` takes an optional a proc to stream responses in real-time chunks instead of waiting for a complete response:
 
-
+```ruby
+stream = proc do |chunk|
+  print(chunk.choice.delta.content) # 'Better', 'three', 'hours', ...
+end
+client.chat('Be poetic.', stream:)
+```
 
-
+[Anthropic API Reference `stream`](https://docs.anthropic.com/en/api/messages)
 
-
+#### Format
 
-
+`format` takes an optional symbol (`:json`) and modifies requests to send additional system text requesting JSON:
 
-
+```ruby
+completion = client.chat([
+  { role: OmniAI::Chat::Role::USER, content: 'What is the name of the drummer for the Beatles?' }
+], format: :json)
+JSON.parse(completion.choice.message.content) # { "name": "Ringo" }
+```
 
-
+[Anthropic API Reference `control-output-format`](https://docs.anthropic.com/en/docs/control-output-format)
data/lib/omniai/anthropic/chat/completion.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Anthropic
+    class Chat
+      # A completion returned by the API.
+      class Completion < OmniAI::Chat::Completion
+        # @return [Array<OmniAI::Chat::Choice>]
+        def choices
+          @choices ||= begin
+            role = @data['role']
+
+            @data['content'].map do |data, index|
+              OmniAI::Chat::Choice.new(data: {
+                'index' => index,
+                'message' => { 'role' => role, 'content' => data['text'] },
+              })
+            end
+          end
+        end
+      end
+    end
+  end
+end
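
For illustration, a rough sketch of the response shape this `Completion` subclass expects; the sample hash is hypothetical, but the keys (`role`, `content`, `text`) mirror exactly what `#choices` reads above:

```ruby
# Hypothetical Anthropic-style response body (illustrative only).
data = {
  'role' => 'assistant',
  'content' => [{ 'type' => 'text', 'text' => 'Why did the chicken cross the road?' }],
}

# Each content block becomes an OmniAI::Chat::Choice, so the text is read back
# through the standard OmniAI accessors used in the README examples.
completion = OmniAI::Anthropic::Chat::Completion.new(data: data)
completion.choice.message.content # => 'Why did the chicken cross the road?'
```
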
data/lib/omniai/anthropic/chat/stream.rb
ADDED
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Anthropic
+    class Chat
+      # A stream given when streaming.
+      class Stream < OmniAI::Chat::Stream
+        module Type
+          PING = 'ping'
+          MESSAGE_START = 'message_start'
+          MESSAGE_STOP = 'message_stop'
+          MESSAGE_DELTA = 'message_delta'
+          CONTENT_BLOCK_START = 'content_block_start'
+          CONTENT_BLOCK_STOP = 'content_block_stop'
+          CONTENT_BLOCK_DELTA = 'content_block_delta'
+        end
+
+        # Process the stream into chunks by event.
+        class Builder
+          attr_reader :id, :model, :role, :content, :index
+
+          # @return [OmniAI::Chat::Chunk]
+          def chunk
+            OmniAI::Chat::Chunk.new(data: {
+              'id' => @id,
+              'model' => @model,
+              'choices' => [{
+                'index' => @index,
+                'delta' => { 'role' => @role, 'content' => @content },
+              }],
+            })
+          end
+
+          # Handler for Type::MESSAGE_START
+          #
+          # @param data [Hash]
+          def message_start(data)
+            @id = data['id']
+            @model = data['model']
+            @role = data['role']
+          end
+
+          # Handler for Type::MESSAGE_STOP
+          #
+          # @param data [Hash]
+          def message_stop(_)
+            @id = nil
+            @model = nil
+            @role = nil
+          end
+
+          # Handler for Type::CONTENT_BLOCK_START
+          #
+          # @param data [Hash]
+          def content_block_start(data)
+            @index = data['index']
+          end
+
+          # Handler for Type::CONTENT_BLOCK_STOP
+          #
+          # @param data [Hash]
+          def content_block_stop(_)
+            @index = nil
+          end
+
+          # Handler for Type::CONTENT_BLOCK_DELTA
+          #
+          # @param data [Hash]
+          def content_block_delta(data)
+            return unless data['delta']['type'].eql?('text_delta')
+
+            @content = data['delta']['text']
+          end
+        end
+
+        # @yield [OmniAI::Chat::Chunk]
+        def stream!(&block)
+          builder = Builder.new
+
+          @response.body.each do |chunk|
+            @parser.feed(chunk) do |type, data|
+              process(type:, data: JSON.parse(data), builder:, &block)
+            end
+          end
+        end
+
+        private
+
+        # @param type [String]
+        # @param data [Hash]
+        # @param builder [Builder]
+        def process(type:, data:, builder:, &)
+          case type
+          when Type::MESSAGE_START then builder.message_start(data)
+          when Type::CONTENT_BLOCK_START then builder.content_block_start(data)
+          when Type::CONTENT_BLOCK_STOP then builder.content_block_stop(data)
+          when Type::MESSAGE_STOP then builder.message_stop(data)
+          when Type::CONTENT_BLOCK_DELTA
+            builder.content_block_delta(data)
+            yield(builder.chunk)
+          end
+        end
+      end
+    end
+  end
+end
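
As a rough illustration of the event flow the `Builder` above handles, feeding it a typical server-sent event sequence buffers the message metadata and yields one chunk per text delta; the payloads here are hypothetical, but the keys mirror what each handler reads:

```ruby
builder = OmniAI::Anthropic::Chat::Stream::Builder.new

# message_start carries the message metadata (id / model / role).
builder.message_start('id' => 'msg_123', 'model' => 'claude-3-haiku-20240307', 'role' => 'assistant')

# content_block_start pins the index used for subsequent deltas.
builder.content_block_start('index' => 0)

# Each content_block_delta of type text_delta updates the buffered content;
# Stream#process then yields builder.chunk to the caller's block.
builder.content_block_delta('delta' => { 'type' => 'text_delta', 'text' => 'Hello' })
builder.chunk.choice.delta.content # => 'Hello'
```
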
data/lib/omniai/anthropic/chat.rb
ADDED
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Anthropic
+    # A Anthropic chat implementation.
+    #
+    # Usage:
+    #
+    #   chat = OmniAI::Anthropic::Chat.new(client: client)
+    #   chat.completion('Tell me a joke.')
+    #   chat.completion(['Tell me a joke.'])
+    #   chat.completion({ role: 'user', content: 'Tell me a joke.' })
+    #   chat.completion([{ role: 'system', content: 'Tell me a joke.' }])
+    class Chat < OmniAI::Chat
+      module Model
+        CLAUDE_INSTANT_1_0 = 'claude-instant-1.2'
+        CLAUDE_2_0 = 'claude-2.0'
+        CLAUDE_2_1 = 'claude-2.1'
+        CLAUDE_3_OPUS_20240229 = 'claude-3-opus-20240229'
+        CLAUDE_3_HAIKU_20240307 = 'claude-3-haiku-20240307'
+        CLAUDE_3_SONET_20240307 = 'claude-3-haiku-20240307'
+        CLAUDE_OPUS = CLAUDE_3_OPUS_20240229
+        CLAUDE_HAIKU = CLAUDE_3_HAIKU_20240307
+        CLAUDE_SONET = CLAUDE_3_SONET_20240307
+      end
+
+      protected
+
+      # @param response [HTTP::Response]
+      # @return [OmniAI::Anthropic::Chat::Stream]
+      def stream!(response:)
+        raise Error, "#{self.class.name}#stream! unstreamable" unless @stream
+
+        Stream.new(response:).stream! { |chunk| @stream.call(chunk) }
+      end
+
+      # @param response [HTTP::Response]
+      # @param response [OmniAI::Anthropic::Chat::Completion]
+      def complete!(response:)
+        Completion.new(data: response.parse)
+      end
+
+      # @return [Hash]
+      def payload
+        OmniAI::Anthropic.config.chat_options.merge({
+          model: @model,
+          messages: messages.filter { |message| !message[:role].eql?(OmniAI::Chat::Role::SYSTEM) },
+          system:,
+          stream: @stream.nil? ? nil : !@stream.nil?,
+          temperature: @temperature,
+        }).compact
+      end
+
+      # @return [String, nil]
+      def system
+        messages = self.messages.filter { |message| message[:role].eql?(OmniAI::Chat::Role::SYSTEM) }
+        messages << { role: OmniAI::Chat::Role::SYSTEM, content: OmniAI::Chat::JSON_PROMPT } if @format.eql?(:json)
+
+        messages.map { |message| message[:content] }.join("\n\n") if messages.any?
+      end
+
+      # @return [String]
+      def path
+        "/#{OmniAI::Anthropic::Client::VERSION}/messages"
+      end
+    end
+  end
+end
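
To make the payload shaping concrete, here is a hedged sketch of roughly what `#payload` and `#system` above assemble for a mixed system/user prompt. The literal hash is illustrative only (message normalization is handled by OmniAI itself), and `max_tokens` comes from the default `chat_options` defined in the config further down:

```ruby
# Given:
#   client.chat([
#     { role: OmniAI::Chat::Role::SYSTEM, content: 'You are a helpful assistant.' },
#     'What is the capital of Canada?',
#   ], temperature: 0.7)
#
# the request body is approximately:
{
  max_tokens: 4096,                     # merged in from OmniAI::Anthropic.config.chat_options
  model: 'claude-3-haiku-20240307',     # Chat::Model::CLAUDE_HAIKU default
  messages: [{ role: 'user', content: 'What is the capital of Canada?' }],
  system: 'You are a helpful assistant.', # system messages are extracted and joined
  temperature: 0.7,
}
# `stream` is nil here, so `.compact` drops it from the payload.
```
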
data/lib/omniai/anthropic/client.rb
ADDED
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Anthropic
+    # An Anthropic client implementation. Usage:
+    #
+    # w/ `api_key``:
+    #   client = OmniAI::Anthropic::Client.new(api_key: '...')
+    #
+    # w/ ENV['ANTHROPIC_API_KEY']:
+    #
+    #   ENV['ANTHROPIC_API_KEY'] = '...'
+    #   client = OmniAI::Anthropic::Client.new
+    #
+    # w/ config:
+    #
+    #   OmniAI::Anthropic.configure do |config|
+    #     config.api_key = '...'
+    #   end
+    #
+    #   client = OmniAI::Anthropic::Client.new
+    class Client < OmniAI::Client
+      VERSION = 'v1'
+
+      # @param api_key [String] optional - defaults to `OmniAI::Anthropic.config.api_key`
+      # @param host [String] optional - defaults to `OmniAI::Anthropic.config.host`
+      def initialize(
+        api_key: OmniAI::Anthropic.config.api_key,
+        version: OmniAI::Anthropic.config.version,
+        logger: OmniAI::Anthropic.config.logger,
+        host: OmniAI::Anthropic.config.host
+      )
+        raise(ArgumentError, %(ENV['ANTHROPIC_API_KEY'] must be defined or `api_key` must be passed)) if api_key.nil?
+
+        super(api_key:, logger:)
+
+        @host = host
+        @version = version
+      end
+
+      # @return [HTTP::Client]
+      def connection
+        HTTP
+          .headers('x-api-key': @api_key)
+          .headers('anthropic-version': @version)
+          .persistent('https://api.anthropic.com')
+      end
+
+      # @raise [OmniAI::Error]
+      #
+      # @param messages [String, Array, Hash]
+      # @param model [String] optional
+      # @param format [Symbol] optional :text or :json
+      # @param temperature [Float, nil] optional
+      # @param stream [Proc, nil] optional
+      #
+      # @return [OmniAI::Chat::Completion]
+      def chat(messages, model: Chat::Model::CLAUDE_HAIKU, temperature: nil, format: nil, stream: nil)
+        Chat.process!(messages, model:, temperature:, format:, stream:, client: self)
+      end
+    end
+  end
+end
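
A short usage sketch tying this client to the `chat` method above; the key is a placeholder and the commented output is purely illustrative:

```ruby
# Explicit key shown here; the client can also fall back to ENV['ANTHROPIC_API_KEY']
# or the global OmniAI::Anthropic.config.
client = OmniAI::Anthropic::Client.new(api_key: '...')

# `chat` defaults the model to Chat::Model::CLAUDE_HAIKU and forwards everything
# to Chat.process! with this client.
completion = client.chat('Tell me a joke!', model: OmniAI::Anthropic::Chat::Model::CLAUDE_OPUS)
completion.choice.message.content # e.g. 'Why did the chicken cross the road? ...'
```
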
data/lib/omniai/anthropic/config.rb
ADDED
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module OmniAI
+  module Anthropic
+    # Config for the Anthropic `api_key` / `host` / `logger` / `version` / `chat_options`.
+    class Config < OmniAI::Config
+      attr_accessor :version, :chat_options
+
+      def initialize
+        super
+        @api_key = ENV.fetch('ANTHROPIC_API_KEY', nil)
+        @host = ENV.fetch('ANTHROPIC_HOST', 'https://api.anthropic.com')
+        @version = ENV.fetch('ANTHROPIC_VERSION', '2023-06-01')
+        @chat_options = { max_tokens: 4096 }
+      end
+    end
+  end
+end
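
The defaults above can be overridden through the `OmniAI::Anthropic.configure` block (defined in `lib/omniai/anthropic.rb`, shown next); the values here are placeholders:

```ruby
OmniAI::Anthropic.configure do |config|
  config.api_key = '...'                     # overrides ENV['ANTHROPIC_API_KEY']
  config.version = '2023-06-01'              # sent as the anthropic-version header by Client#connection
  config.chat_options = { max_tokens: 1024 } # merged into every chat payload by Chat#payload
end
```
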
data/lib/omniai/anthropic.rb
CHANGED
@@ -1,10 +1,24 @@
 # frozen_string_literal: true
 
-
+require 'event_stream_parser'
+require 'omniai'
+require 'zeitwerk'
+
+loader = Zeitwerk::Loader.for_gem
+loader.push_dir(__dir__, namespace: OmniAI)
+loader.setup
 
 module OmniAI
+  # A namespace for everything Anthropic.
   module Anthropic
-
-
+    # @return [OmniAI::Anthropic::Config]
+    def self.config
+      @config ||= Config.new
+    end
+
+    # @yield [OmniAI::Anthropic::Config]
+    def self.configure
+      yield config
+    end
   end
 end
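
As a hedged sketch of the autoloading behaviour set up above (the require path is assumed from the `lib/omniai/anthropic.rb` layout), nothing under `lib/omniai/anthropic/` needs to be required explicitly:

```ruby
require 'omniai/anthropic'

# Zeitwerk resolves constants such as OmniAI::Anthropic::Client, Config and Chat
# lazily, the first time each is referenced.
client = OmniAI::Anthropic::Client.new(api_key: '...')
client.class.name # => 'OmniAI::Anthropic::Client'
```
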
metadata
CHANGED
@@ -1,17 +1,17 @@
 --- !ruby/object:Gem::Specification
 name: omniai-anthropic
 version: !ruby/object:Gem::Version
-  version:
+  version: 1.0.3
 platform: ruby
 authors:
 - Kevin Sylvestre
-autorequire:
+autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-06-
+date: 2024-06-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: event_stream_parser
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -25,13 +25,13 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: omniai
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :
+  type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -39,13 +39,13 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name:
+  name: zeitwerk
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-  type: :
+  type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
@@ -59,17 +59,22 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- ".rspec"
-- ".rubocop.yml"
+- Gemfile
 - README.md
-- Rakefile
 - lib/omniai/anthropic.rb
+- lib/omniai/anthropic/chat.rb
+- lib/omniai/anthropic/chat/completion.rb
+- lib/omniai/anthropic/chat/stream.rb
+- lib/omniai/anthropic/client.rb
+- lib/omniai/anthropic/config.rb
 - lib/omniai/anthropic/version.rb
-- sig/omniai/anthropic.rbs
 homepage: https://github.com/ksylvest/omniai-anthropic
 licenses: []
-metadata:
-
+metadata:
+  homepage_uri: https://github.com/ksylvest/omniai-anthropic
+  changelog_uri: https://github.com/ksylvest/omniai-anthropic/releases
+  rubygems_mfa_required: 'true'
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -77,15 +82,15 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 3.
+      version: 3.3.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.3
-signing_key:
+signing_key:
 specification_version: 4
 summary: A generalized framework for interacting with Anthropic
 test_files: []
data/.rspec
DELETED
data/.rubocop.yml
DELETED
data/Rakefile
DELETED