omniai-deepseek 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 2b01ec27c812ac1fd9d44619b8bf622b3e2f9f2de08e18ae5b2ecb77f8304146
+   data.tar.gz: 5c64510f011221a229130b58e486e771d10a51aab36f8b80616f38cf1ce4dfa9
+ SHA512:
+   metadata.gz: a8c786ce882c8ed8f96227b05cd5def3315f595f73a873f5890224056990a79167ce401bad34d383f25c29953495bbaceb5a06dde137b88c95acaa665a0a9b02
+   data.tar.gz: 894147e0f0b1e0c8aa3c8ad6fb2faece3a9560c04ffa11783182f5b9ff8457eccb37222b6c97c382fe2110f20ed47799e433fc8c863306c7c99c62b9152269dd
data/Gemfile ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ source "https://rubygems.org"
+
+ gemspec
+
+ gem "irb"
+ gem "logger"
+ gem "rake"
+ gem "rspec"
+ gem "rspec_junit_formatter"
+ gem "rubocop"
+ gem "rubocop-basic"
+ gem "rubocop-rake"
+ gem "rubocop-rspec"
+ gem "simplecov"
+ gem "webmock"
+ gem "yard"
data/README.md ADDED
@@ -0,0 +1,111 @@
+ # OmniAI::DeepSeek
+
+ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/ksylvest/omniai-deepseek/blob/main/LICENSE)
+ [![RubyGems](https://img.shields.io/gem/v/omniai-deepseek)](https://rubygems.org/gems/omniai-deepseek)
+ [![GitHub](https://img.shields.io/badge/github-repo-blue.svg)](https://github.com/ksylvest/omniai-deepseek)
+ [![Yard](https://img.shields.io/badge/docs-site-blue.svg)](https://omniai-deepseek.ksylvest.com)
+ [![CircleCI](https://img.shields.io/circleci/build/github/ksylvest/omniai-deepseek)](https://circleci.com/gh/ksylvest/omniai-deepseek)
+
+ A DeepSeek implementation of the [OmniAI](https://github.com/ksylvest/omniai) interface for [DeepSeek](https://www.deepseek.com/).
+
+ ## Installation
+
+ ```sh
+ gem install omniai-deepseek
+ ```
+
+ ## Usage
+
+ ### Client
+
+ A client is set up as follows if `ENV['DEEPSEEK_API_KEY']` exists:
+
+ ```ruby
+ client = OmniAI::DeepSeek::Client.new
+ ```
+
+ A client may also be passed the following options (see the example below):
+
+ - `api_key` (required - default is `ENV['DEEPSEEK_API_KEY']`)
+ - `host` (optional)
+
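+ For example (the key below is an illustrative placeholder rather than a working credential):
+
+ ```ruby
+ client = OmniAI::DeepSeek::Client.new(
+   api_key: 'sk-...', # defaults to ENV['DEEPSEEK_API_KEY']
+   host: 'https://api.deepseek.com' # defaults to OmniAI::DeepSeek::Config::DEFAULT_HOST
+ )
+ ```
+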
+ ### Configuration
+
+ Global configuration is supported for the following options:
+
+ ```ruby
+ OmniAI::DeepSeek.configure do |config|
+   config.api_key = 'sk-...' # default: ENV['DEEPSEEK_API_KEY']
+   config.host = '...' # default: 'https://api.deepseek.com'
+ end
+ ```
+
+ ### Chat
+
+ A chat completion is generated by passing in a simple text prompt:
+
+ ```ruby
+ completion = client.chat('Tell me a joke!')
+ completion.content # 'Why did the chicken cross the road? To get to the other side.'
+ ```
+
+ A chat completion may also be generated by using a prompt builder:
+
+ ```ruby
+ completion = client.chat do |prompt|
+   prompt.system('You are an expert in geography.')
+   prompt.user('What is the capital of Canada?')
+ end
+ completion.content # 'The capital of Canada is Ottawa.'
+ ```
+
+ #### Model
+
+ `model` takes an optional string (default is `deepseek-chat`):
+
+ ```ruby
+ completion = client.chat('How fast is a cheetah?', model: OmniAI::DeepSeek::Chat::Model::REASONER)
+ completion.content # 'A cheetah can reach speeds over 100 km/h.'
+ ```
+
+ [DeepSeek API Reference `model`](https://api-docs.deepseek.com/quick_start/pricing)
+
+ #### Temperature
+
+ `temperature` takes an optional float between `0.0` and `2.0` (default is `0.7`):
+
+ ```ruby
+ completion = client.chat('Pick a number between 1 and 5', temperature: 2.0)
+ completion.content # '3'
+ ```
+
+ [DeepSeek API Reference `temperature`](https://api-docs.deepseek.com/quick_start/parameter_settings)
+
+ #### Stream
+
+ `stream` takes an optional proc used to stream responses in real-time chunks instead of waiting for a complete response:
+
+ ```ruby
+ stream = proc do |chunk|
+   print(chunk.content) # 'Better', 'three', 'hours', ...
+ end
+ client.chat('Be poetic.', stream:)
+ ```
+
+ [DeepSeek API Reference `stream`](https://platform.deepseek.com/docs/api-reference/chat/create#chat-create-stream)
+
+ #### Format
+
+ `format` takes an optional symbol (`:json`) that sets the `response_format` to `json_object`:
+
+ ```ruby
+ completion = client.chat(format: :json) do |prompt|
+   prompt.system(OmniAI::Chat::JSON_PROMPT)
+   prompt.user('What is the name of the drummer for the Beatles?')
+ end
+ JSON.parse(completion.content) # { "name": "Ringo" }
+ ```
+
+ [DeepSeek API Reference `response_format`](https://platform.deepseek.com/docs/api-reference/chat/create#chat-create-stream)
+
+ > When using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message.
data/lib/omniai/deepseek/chat.rb ADDED
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module DeepSeek
+     # A DeepSeek chat implementation.
+     #
+     # Usage:
+     #
+     #   completion = OmniAI::DeepSeek::Chat.process!(client: client) do |prompt|
+     #     prompt.system('You are an expert in the field of AI.')
+     #     prompt.user('What are the biggest risks of AI?')
+     #   end
+     #   completion.choice.message.content # '...'
+     class Chat < OmniAI::Chat
+       JSON_RESPONSE_FORMAT = { type: "json_object" }.freeze
+
+       module Model
+         CHAT = "deepseek-chat"
+         REASONER = "deepseek-reasoner"
+       end
+
+       DEFAULT_MODEL = Model::CHAT
+
+       protected
+
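+       # Builds the request body for the chat completions endpoint. As an illustrative
+       # sketch only (the exact serialization is handled by OmniAI), a prompt with a
+       # system and a user message produces a payload roughly like:
+       #
+       #   {
+       #     messages: [
+       #       { role: "system", content: "You are an expert in geography." },
+       #       { role: "user", content: "What is the capital of Canada?" }
+       #     ],
+       #     model: "deepseek-chat",
+       #     temperature: 0.7
+       #   }
+       #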
+       # @return [Hash]
+       def payload
+         OmniAI::DeepSeek.config.chat_options.merge({
+           messages: @prompt.serialize,
+           model: @model,
+           stream: @stream.nil? ? nil : !@stream.nil?,
+           temperature: @temperature,
+           response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
+           tools: (@tools.map(&:serialize) if @tools&.any?),
+         }).compact
+       end
+
+       # @return [String]
+       def path
+         "/chat/completions"
+       end
+     end
+   end
+ end
data/lib/omniai/deepseek/client.rb ADDED
@@ -0,0 +1,63 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module DeepSeek
+     # A DeepSeek client implementation. Usage:
+     #
+     # w/ `api_key`:
+     #
+     #   client = OmniAI::DeepSeek::Client.new(api_key: '...')
+     #
+     # w/ ENV['DEEPSEEK_API_KEY']:
+     #
+     #   ENV['DEEPSEEK_API_KEY'] = '...'
+     #   client = OmniAI::DeepSeek::Client.new
+     #
+     # w/ config:
+     #
+     #   OmniAI::DeepSeek.configure do |config|
+     #     config.api_key = '...'
+     #   end
+     #
+     #   client = OmniAI::DeepSeek::Client.new
+     class Client < OmniAI::Client
+       # @param api_key [String, nil] optional - defaults to `OmniAI::DeepSeek.config.api_key`
+       # @param host [String] optional - defaults to `OmniAI::DeepSeek.config.host`
+       # @param logger [Logger, nil] optional - defaults to `OmniAI::DeepSeek.config.logger`
+       # @param timeout [Integer, nil] optional - defaults to `OmniAI::DeepSeek.config.timeout`
+       def initialize(
+         api_key: OmniAI::DeepSeek.config.api_key,
+         host: OmniAI::DeepSeek.config.host,
+         logger: OmniAI::DeepSeek.config.logger,
+         timeout: OmniAI::DeepSeek.config.timeout
+       )
+         super
+       end
+
+       # @return [HTTP::Client]
+       def connection
+         @connection ||= begin
+           http = super
+           http = http.auth("Bearer #{@api_key}") if @api_key
+           http
+         end
+       end
+
+       # @raise [OmniAI::Error]
+       #
+       # @param messages [String] optional
+       # @param model [String] optional
+       # @param format [Symbol] optional :text or :json
+       # @param temperature [Float, nil] optional
+       # @param stream [Proc, nil] optional
+       # @param tools [Array<OmniAI::Tool>, nil] optional
+       #
+       # @yield [prompt]
+       # @yieldparam prompt [OmniAI::Chat::Prompt]
+       #
+       # @return [OmniAI::Chat::Completion]
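+       #
+       # Usage example (illustrative; see the README for more):
+       #
+       #   completion = client.chat('What is the capital of Canada?', temperature: 0.7)
+       #   completion.content # 'The capital of Canada is Ottawa.'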
+       def chat(messages = nil, model: Chat::DEFAULT_MODEL, temperature: nil, format: nil, stream: nil, tools: nil, &)
+         Chat.process!(messages, model:, temperature:, format:, stream:, tools:, client: self, &)
+       end
+     end
+   end
+ end
data/lib/omniai/deepseek/config.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module DeepSeek
+     # Configuration for DeepSeek.
+     class Config < OmniAI::Config
+       DEFAULT_HOST = "https://api.deepseek.com"
+
+       # @param api_key [String, nil] optional - defaults to `ENV['DEEPSEEK_API_KEY']`
+       # @param host [String, nil] optional - defaults to `ENV['DEEPSEEK_HOST']` w/ fallback to `DEFAULT_HOST`
+       # @param logger [Logger, nil] optional
+       # @param timeout [Integer, Hash, nil] optional
+       def initialize(
+         api_key: ENV.fetch("DEEPSEEK_API_KEY", nil),
+         host: ENV.fetch("DEEPSEEK_HOST", DEFAULT_HOST),
+         logger: nil,
+         timeout: nil
+       )
+         super
+       end
+     end
+   end
+ end
data/lib/omniai/deepseek/version.rb ADDED
@@ -0,0 +1,7 @@
+ # frozen_string_literal: true
+
+ module OmniAI
+   module DeepSeek
+     VERSION = "0.1.0"
+   end
+ end
data/lib/omniai/deepseek.rb ADDED
@@ -0,0 +1,25 @@
+ # frozen_string_literal: true
+
+ require "event_stream_parser"
+ require "omniai"
+ require "zeitwerk"
+
+ loader = Zeitwerk::Loader.for_gem
+ loader.push_dir(__dir__, namespace: OmniAI)
+ loader.inflector.inflect "deepseek" => "DeepSeek"
+ loader.setup
+
+ module OmniAI
+   # A namespace for everything DeepSeek.
+   module DeepSeek
+     # @return [OmniAI::DeepSeek::Config]
+     def self.config
+       @config ||= Config.new
+     end
+
+     # @yield [OmniAI::DeepSeek::Config]
+     def self.configure
+       yield config
+     end
+   end
+ end
metadata ADDED
@@ -0,0 +1,92 @@
+ --- !ruby/object:Gem::Specification
+ name: omniai-deepseek
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Kevin Sylvestre
+ bindir: exe
+ cert_chain: []
+ date: 2025-01-30 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: event_stream_parser
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: omniai
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: zeitwerk
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: An implementation of OmniAI for DeepSeek
+ email:
+ - kevin@ksylvest.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Gemfile
+ - README.md
+ - lib/omniai/deepseek.rb
+ - lib/omniai/deepseek/chat.rb
+ - lib/omniai/deepseek/client.rb
+ - lib/omniai/deepseek/config.rb
+ - lib/omniai/deepseek/version.rb
+ homepage: https://github.com/ksylvest/omniai-deepseek
+ licenses:
+ - MIT
+ metadata:
+   homepage_uri: https://github.com/ksylvest/omniai-deepseek
+   changelog_uri: https://github.com/ksylvest/omniai-deepseek/releases
+   rubygems_mfa_required: 'true'
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: 3.2.0
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.6.2
+ specification_version: 4
+ summary: A generalized framework for interacting with DeepSeek
+ test_files: []