gen-ai 0.0.1 → 0.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: e8b3a8ca11b07cadcaf16a1fce985173cd14982a2f575189bbdc631f2306d745
-   data.tar.gz: f64ecb2bcf5ef48bd66598261db1005bbffcba45596c6339c35ae51f1b257232
+   metadata.gz: a51511a0ca7cd09a4505151799e5d797d87d4418bb4a250e0436fab78317ac1b
+   data.tar.gz: 1719adc9ca432e9da3420492651ea3ef2a10b7f990fe3607c28cf8187f4c6811
  SHA512:
-   metadata.gz: 105c83eee425a422b64bb257dc9fc20c5f6619daf3dd12165ac03e585cf6ffa0752dd3b45118bf1330db57cfa8a24df025e6de9c2e9cf9d2360d8decc4440878
-   data.tar.gz: 166c15562a30f0af948a18ad4773d4704c594d9a28e1aaab948338867a634c42d4937596114c13cdbaefe8efc7901125e71484c3743a865d0d204fb05306cc24
+   metadata.gz: f482fba1992856a4009aabdebd849cf245d8069426da935999c3cbb323b02a36e916c2ab299c6fffd0ba7e5c320ddda001a9abb625063ea9c8a202376151f225
+   data.tar.gz: 155f5f3e8d9b5c0234075635d4623300c0dcbed2fb63b2a8ee4bf2199a5bb75a6dbe8d864028078e5275b72c79a05836c7e26a4b4f49e179d80744caa8dd881d
data/.rubocop.yml ADDED
@@ -0,0 +1,38 @@
+ AllCops:
+   NewCops: enable
+   TargetRubyVersion: 2.7
+
+ Documentation:
+   Enabled: false
+
+ Gemspec/DevelopmentDependencies:
+   Enabled: false
+
+ Lint/MissingSuper:
+   Enabled: false
+
+ Lint/NonDeterministicRequireOrder:
+   Enabled: false
+
+ Lint/UnusedMethodArgument:
+   Enabled: false
+
+ Layout/FirstArrayElementIndentation:
+   Enabled: false
+
+ Layout/FirstHashElementIndentation:
+   Enabled: false
+
+ Layout/SpaceInsideHashLiteralBraces:
+   Enabled: false
+
+ Layout/ArgumentAlignment:
+   Enabled: false
+
+ Layout/LineLength:
+   Exclude:
+     - spec/**/*.rb
+
+ Metrics/BlockLength:
+   Exclude:
+     - spec/**/*.rb
data/Rakefile CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

- require "bundler/gem_tasks"
- require "rspec/core/rake_task"
+ require 'bundler/gem_tasks'
+ require 'rspec/core/rake_task'

  RSpec::Core::RakeTask.new(:spec)

data/gen-ai.gemspec CHANGED
@@ -1,23 +1,23 @@
  # frozen_string_literal: true

- require_relative "lib/version"
+ require_relative 'lib/gen_ai/version'

  Gem::Specification.new do |spec|
-   spec.name = "gen-ai"
+   spec.name = 'gen-ai'
    spec.version = GenAI::VERSION
-   spec.authors = ["Alex Chaplinsky"]
-   spec.email = ["alchaplinsky@gmail.com"]
+   spec.authors = ['Alex Chaplinsky']
+   spec.email = ['alchaplinsky@gmail.com']

-   spec.summary = "Generative AI toolset for Ruby."
-   spec.description = "Generative AI toolset for Ruby."
-   spec.homepage = "https://github.com/alchaplinsky/gen-ai"
-   spec.required_ruby_version = ">= 2.6.0"
+   spec.summary = 'Generative AI toolset for Ruby.'
+   spec.description = 'Generative AI toolset for Ruby.'
+   spec.homepage = 'https://github.com/alchaplinsky/gen-ai'
+   spec.required_ruby_version = '>= 2.7.0'

    # spec.metadata["allowed_push_host"] = "TODO: Set to your gem server 'https://example.com'"

-   spec.metadata["homepage_uri"] = spec.homepage
-   spec.metadata["source_code_uri"] = "https://github.com/alchaplinsky/gen-ai"
-   spec.metadata["changelog_uri"] = "https://github.com/alchaplinsky/gen-ai/blob/master/CHANGELOG.md"
+   spec.metadata['homepage_uri'] = spec.homepage
+   spec.metadata['source_code_uri'] = 'https://github.com/alchaplinsky/gen-ai'
+   spec.metadata['changelog_uri'] = 'https://github.com/alchaplinsky/gen-ai/blob/master/CHANGELOG.md'

    # Specify which files should be added to the gem when it is released.
    # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
@@ -27,13 +27,16 @@ Gem::Specification.new do |spec|
          f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor Gemfile])
      end
    end
-   spec.bindir = "exe"
+   spec.bindir = 'exe'
    spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
-   spec.require_paths = ["lib"]
+   spec.require_paths = ['lib']

    # Uncomment to register a new dependency of your gem
-   # spec.add_dependency "example-gem", "~> 1.0"
+   spec.add_dependency 'zeitwerk', '~> 2.6'

+   spec.add_development_dependency 'google_palm_api', '~> 0.1'
+   spec.add_development_dependency 'ruby-openai', '~> 5.1'
    # For more information and examples about making a new gem, check out our
    # guide at: https://bundler.io/guides/creating_gem.html
+   spec.metadata['rubygems_mfa_required'] = 'true'
  end
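A note on the dependency changes above: zeitwerk is now the gem's only runtime dependency, while google_palm_api and ruby-openai are declared as development dependencies. An application using gen-ai is therefore expected to add the client gem for its chosen provider to its own bundle. A minimal, illustrative sketch (version constraints mirror the gemspec above):

    # Gemfile of an application consuming gen-ai (illustrative sketch)
    source 'https://rubygems.org'

    gem 'gen-ai'
    gem 'ruby-openai', '~> 5.1'       # only needed for the OpenAI provider
    gem 'google_palm_api', '~> 0.1'   # only needed for the Google PaLM provider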
data/lib/gen_ai/dependency.rb ADDED
@@ -0,0 +1,42 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   module Dependency
+     class VersionError < StandardError; end
+
+     def depends_on(*names)
+       names.each { |name| load_dependency(name) }
+     end
+
+     private
+
+     def load_dependency(name)
+       gem(name)
+
+       return true unless defined? Bundler
+
+       gem_spec = Gem::Specification.find_by_name(name)
+       gem_requirement = dependencies.find { |gem| gem.name == gem_spec.name }.requirement
+
+       unless gem_requirement.satisfied_by?(gem_spec.version)
+         raise VersionError, version_error(gem_spec, gem_requirement)
+       end
+
+       require_gem(gem_spec)
+     end
+
+     def version_error(gem_spec, gem_requirement)
+       "'#{gem_spec.name}' gem version is #{gem_spec.version}, but your Gemfile specified #{gem_requirement}."
+     end
+
+     def require_gem(gem_spec)
+       gem_spec.full_require_paths.each do |path|
+         Dir.glob("#{path}/*.rb").each { |file| require file }
+       end
+     end
+
+     def dependencies
+       Bundler.load.dependencies
+     end
+   end
+ end
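The new GenAI::Dependency mixin is what makes the provider gems optional: depends_on activates the named gem, and when Bundler is loaded it also checks the installed version against the Gemfile requirement (raising GenAI::Dependency::VersionError on a mismatch) before requiring the gem's files. A minimal sketch of the mixin in use; the including class here is hypothetical, since only GenAI::Language::Base includes it in this release:

    class MyProviderWrapper
      include GenAI::Dependency

      def initialize
        # Raises GenAI::Dependency::VersionError if the bundled ruby-openai
        # version does not satisfy the Gemfile constraint.
        depends_on 'ruby-openai'
      end
    end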
data/lib/gen_ai/language/base.rb ADDED
@@ -0,0 +1,46 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   class Language
+     class Base
+       include GenAI::Dependency
+
+       DEFAULT_ROLE = 'user'
+
+       def embed(...)
+         raise NotImplementedError, "#{self.class.name} does not support embedding"
+       end
+
+       def complete(...)
+         raise NotImplementedError, "#{self.class.name} does not support completion"
+       end
+
+       def chat(...)
+         raise NotImplementedError, "#{self.class.name} does not support conversations"
+       end
+
+       private
+
+       attr_reader :client
+
+       def handle_errors
+         response = yield
+         return if response.empty?
+
+         if response['error']
+           raise GenAI::ApiError, "#{api_provider_name} API error: #{response.dig('error', 'message')}"
+         end
+
+         response
+       end
+
+       def api_provider_name
+         self.class.name.split('::').last
+       end
+
+       def build_result(model:, raw:, parsed:)
+         GenAI::Result.new(provider: @provider, model: model, raw: raw, values: parsed)
+       end
+     end
+   end
+ end
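GenAI::Language::Base defines the provider contract: subclasses override embed, complete and chat, wrap client calls in handle_errors, and return a GenAI::Result via build_result. handle_errors converts a response carrying an 'error' key into a GenAI::ApiError whose message is prefixed with the provider name. A hedged sketch of catching that error (the token lookup and the exact message text are illustrative):

    begin
      GenAI::Language.new(:openai, ENV['OPENAI_API_KEY']).complete('Hello')
    rescue GenAI::ApiError => e
      warn e.message # e.g. "OpenAI API error: <message reported by the API>"
    end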
data/lib/gen_ai/language/google_palm.rb ADDED
@@ -0,0 +1,104 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   class Language
+     class GooglePalm < Base
+       DEFAULT_ROLE = '0'
+       EMBEDDING_MODEL = 'textembedding-gecko-001'
+       COMPLETION_MODEL = 'text-bison-001'
+       CHAT_COMPLETION_MODEL = 'chat-bison-001'
+
+       def initialize(token:, options: {})
+         depends_on 'google_palm_api'
+
+         @provider = :google_palm
+         @client = ::GooglePalmApi::Client.new(api_key: token)
+       end
+
+       def embed(input, model: nil)
+         responses = array_wrap(input).map do |text|
+           handle_errors { client.embed(text: text, model: model) }
+         end
+
+         build_result(
+           model: model || EMBEDDING_MODEL,
+           raw: { 'data' => responses, 'usage' => {} },
+           parsed: extract_embeddings(responses)
+         )
+       end
+
+       def complete(prompt, options: {})
+         parameters = build_completion_options(prompt, options)
+
+         response = handle_errors { client.generate_text(**parameters) }
+
+         build_result(
+           model: parameters[:model],
+           raw: response.merge('usage' => {}),
+           parsed: extract_completions(response)
+         )
+       end
+
+       def chat(message, context: nil, history: [], examples: [], options: {})
+         parameters = build_chat_options(message, context, history, examples, options)
+
+         response = handle_errors { client.generate_chat_message(**parameters) }
+
+         build_result(
+           model: parameters[:model],
+           raw: response.merge('usage' => {}),
+           parsed: extract_chat_messages(response)
+         )
+       end
+
+       private
+
+       def build_chat_options(message, context, history, examples, options)
+         {
+           model: options.delete(:model) || CHAT_COMPLETION_MODEL,
+           messages: history.append({ author: DEFAULT_ROLE, content: message }),
+           examples: compose_examples(examples),
+           context: context
+         }.merge(options)
+       end
+
+       def build_completion_options(prompt, options)
+         {
+           prompt: prompt,
+           model: options.delete(:model) || COMPLETION_MODEL
+         }.merge(options)
+       end
+
+       def compose_examples(examples)
+         examples.each_slice(2).map do |example|
+           {
+             input: { content: symbolize(example.first)[:content] },
+             output: { content: symbolize(example.last)[:content] }
+           }
+         end
+       end
+
+       def symbolize(hash)
+         hash.transform_keys(&:to_sym)
+       end
+
+       def array_wrap(object)
+         return [] if object.nil?
+
+         object.respond_to?(:to_ary) ? object.to_ary || [object] : [object]
+       end
+
+       def extract_embeddings(responses)
+         responses.map { |response| response.dig('embedding', 'value') }
+       end
+
+       def extract_completions(response)
+         response['candidates'].map { |candidate| candidate['output'] }
+       end
+
+       def extract_chat_messages(response)
+         response['candidates'].map { |candidate| candidate['content'] }
+       end
+     end
+   end
+ end
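For the PaLM provider, chat examples are passed as a flat array and paired up by compose_examples: each consecutive pair becomes one input/output example, and only the :content key of each hash is used. An illustrative call through the facade (the API key lookup is a placeholder):

    palm = GenAI::Language.new(:google_palm, ENV['PALM_API_KEY'])
    result = palm.chat(
      'What is the capital of Turkey?',
      context: 'Answer like a geography teacher',
      examples: [
        { content: 'What is the capital of France?' },   # example input
        { content: 'The capital of France is Paris.' }   # example output
      ]
    )
    result.value # first candidate returned by chat-bison-001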
data/lib/gen_ai/language/open_ai.rb ADDED
@@ -0,0 +1,72 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   class Language
+     class OpenAI < Base
+       EMBEDDING_MODEL = 'text-embedding-ada-002'
+       COMPLETION_MODEL = 'gpt-3.5-turbo'
+
+       def initialize(token:, options: {})
+         depends_on 'ruby-openai'
+         @provider = :openai
+         @client = ::OpenAI::Client.new(access_token: token)
+       end
+
+       def embed(input, model: nil)
+         parameters = { input: input, model: model || EMBEDDING_MODEL }
+
+         response = handle_errors { client.embeddings(parameters: parameters) }
+
+         build_result(model: parameters[:model], raw: response, parsed: extract_embeddings(response))
+       end
+
+       def complete(prompt, options: {})
+         parameters = build_completion_options(prompt, options)
+
+         response = handle_errors { client.chat(parameters: parameters) }
+
+         build_result(model: parameters[:model], raw: response, parsed: extract_completions(response))
+       end
+
+       def chat(message, context: nil, history: [], examples: [], options: {})
+         parameters = build_chat_options(message, context, history, examples, options)
+
+         response = handle_errors { client.chat(parameters: parameters) }
+
+         build_result(model: parameters[:model], raw: response, parsed: extract_completions(response))
+       end
+
+       private
+
+       def build_chat_options(message, context, history, examples, options)
+         messages = []
+         messages.concat(examples)
+         messages.concat(history)
+
+         messages.prepend({ role: 'system', content: context }) if context
+
+         messages.append({ role: DEFAULT_ROLE, content: message })
+
+         {
+           messages: messages,
+           model: options.delete(:model) || COMPLETION_MODEL
+         }.merge(options)
+       end
+
+       def build_completion_options(prompt, options)
+         {
+           messages: [{ role: DEFAULT_ROLE, content: prompt }],
+           model: options.delete(:model) || COMPLETION_MODEL
+         }.merge(options)
+       end
+
+       def extract_embeddings(response)
+         response['data'].map { |datum| datum['embedding'] }
+       end
+
+       def extract_completions(response)
+         response['choices'].map { |choice| choice.dig('message', 'content') }
+       end
+     end
+   end
+ end
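The OpenAI provider assembles the chat payload in a fixed order: an optional system message built from context:, then examples:, then history:, then the new user message; complete goes through the same chat endpoint with a single user message. An illustrative call (the API key lookup is a placeholder):

    openai = GenAI::Language.new(:openai, ENV['OPENAI_API_KEY'])
    result = openai.chat(
      'And what about France?',
      context: 'You are a terse geography assistant',
      history: [
        { role: 'user', content: 'What is the capital of Germany?' },
        { role: 'assistant', content: 'Berlin.' }
      ],
      model: 'gpt-3.5-turbo' # optional; this is already the default COMPLETION_MODEL
    )
    result.value # content of the first returned choice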
data/lib/gen_ai/language.rb ADDED
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   class Language
+     def initialize(provider, token, options: {})
+       build_llm(provider, token, options)
+     end
+
+     def embed(text, model: nil)
+       llm.embed(text, model: model)
+     end
+
+     def complete(prompt, options = {})
+       llm.complete(prompt, options: options)
+     end
+
+     def chat(message, context: nil, history: [], examples: [], **options)
+       llm.chat(message, context: context, history: history, examples: examples, options: options)
+     end
+
+     # def answer(prompt); end
+
+     # def sentiment(text); end
+
+     # def keywords(text); end
+
+     # def summarization(text); end
+
+     # def translation(text, _target:); end
+
+     # def correction(text); end
+
+     private
+
+     attr_reader :llm
+
+     def build_llm(provider, token, options)
+       klass = GenAI::Language.constants.find do |const|
+         const.to_s.downcase == provider.to_s.downcase.gsub('_', '')
+       end
+
+       raise UnsupportedProvider, "Unsupported LLM provider '#{provider}'" unless klass
+
+       @llm = GenAI::Language.const_get(klass).new(token: token, options: options)
+     end
+   end
+ end
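GenAI::Language is the public entry point: it resolves the provider class by name (underscores are ignored, so :openai, :open_ai and :google_palm all work) and raises GenAI::UnsupportedProvider for anything else. A short usage sketch (token lookups are placeholders):

    model = GenAI::Language.new(:openai, ENV['OPENAI_API_KEY'])

    model.complete('Write a haiku about Ruby').value
    model.embed(['first text', 'second text']).values
    model.chat('Hello there!', context: 'You are a friendly assistant').value

    GenAI::Language.new(:anthropic, 'token') # raises GenAI::UnsupportedProvider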
data/lib/gen_ai/result.rb ADDED
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   class Result
+     attr_reader :raw, :provider, :model, :values
+
+     def initialize(provider:, model:, raw:, values:)
+       @raw = raw
+       @provider = provider
+       @model = model
+       @values = values
+     end
+
+     def value
+       values.first
+     end
+
+     def prompt_tokens
+       usage['prompt_tokens']
+     end
+
+     def completion_tokens
+       if usage['completion_tokens'] ||
+          (total_tokens && prompt_tokens)
+         total_tokens.to_i - prompt_tokens.to_i
+       end
+     end
+
+     def total_tokens
+       usage['total_tokens']
+     end
+
+     private
+
+     def usage
+       raw['usage']
+     end
+   end
+ end
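Every provider call now returns a GenAI::Result, which keeps the raw provider payload alongside the parsed values and exposes token usage where the provider reports it (the PaLM provider merges in an empty 'usage' hash, so its token counts come back as nil). Continuing the usage sketch above:

    result = model.complete('Summarize Zeitwerk in one sentence')

    result.value         # first parsed completion
    result.values        # all parsed completions
    result.raw           # untouched provider response
    result.prompt_tokens # from raw['usage'] when the provider supplies it
    result.total_tokens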
data/lib/gen_ai/version.rb ADDED
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ module GenAI
+   VERSION = '0.1.0'
+ end
data/lib/gen_ai.rb CHANGED
@@ -1,13 +1,14 @@
  # frozen_string_literal: true

- require_relative "version"
+ require 'zeitwerk'
+ require 'pry'
+
+ loader = Zeitwerk::Loader.for_gem
+ loader.inflector.inflect('gen_ai' => 'GenAI', 'open_ai' => 'OpenAI')
+ loader.setup

  module GenAI
    class Error < StandardError; end
-   class UnsupportedConfiguration < Error; end
-   # Your code goes here...
+   class ApiError < Error; end
+   class UnsupportedProvider < Error; end
  end
-
- require_relative "language/google_palm"
- require_relative "language/open_ai"
- require_relative "language"
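The per-file requires are gone: the gem now relies on Zeitwerk autoloading, with explicit inflections so that snake_case file names map onto the GenAI and OpenAI acronym constants. A sketch of the resulting mapping (comments only, no new API):

    require 'gen_ai'

    # With the inflections configured above, Zeitwerk resolves constants lazily:
    #   lib/gen_ai.rb                      => GenAI
    #   lib/gen_ai/result.rb               => GenAI::Result
    #   lib/gen_ai/language/open_ai.rb     => GenAI::Language::OpenAI
    #   lib/gen_ai/language/google_palm.rb => GenAI::Language::GooglePalm
    GenAI::Language::OpenAI # loaded on first reference; no require_relative needed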
metadata CHANGED
@@ -1,15 +1,57 @@
  --- !ruby/object:Gem::Specification
  name: gen-ai
  version: !ruby/object:Gem::Version
-   version: 0.0.1
+   version: 0.1.0
  platform: ruby
  authors:
  - Alex Chaplinsky
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-10-14 00:00:00.000000000 Z
- dependencies: []
+ date: 2023-10-19 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: zeitwerk
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.6'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '2.6'
+ - !ruby/object:Gem::Dependency
+   name: google_palm_api
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.1'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0.1'
+ - !ruby/object:Gem::Dependency
+   name: ruby-openai
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.1'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '5.1'
  description: Generative AI toolset for Ruby.
  email:
  - alchaplinsky@gmail.com
@@ -18,16 +60,20 @@ extensions: []
  extra_rdoc_files: []
  files:
  - ".rspec"
+ - ".rubocop.yml"
  - CHANGELOG.md
  - CODE_OF_CONDUCT.md
  - README.md
  - Rakefile
  - gen-ai.gemspec
  - lib/gen_ai.rb
- - lib/language.rb
- - lib/language/google_palm.rb
- - lib/language/open_ai.rb
- - lib/version.rb
+ - lib/gen_ai/dependency.rb
+ - lib/gen_ai/language.rb
+ - lib/gen_ai/language/base.rb
+ - lib/gen_ai/language/google_palm.rb
+ - lib/gen_ai/language/open_ai.rb
+ - lib/gen_ai/result.rb
+ - lib/gen_ai/version.rb
  - sig/gen_ai.rbs
  homepage: https://github.com/alchaplinsky/gen-ai
  licenses: []
@@ -35,6 +81,7 @@ metadata:
    homepage_uri: https://github.com/alchaplinsky/gen-ai
    source_code_uri: https://github.com/alchaplinsky/gen-ai
    changelog_uri: https://github.com/alchaplinsky/gen-ai/blob/master/CHANGELOG.md
+   rubygems_mfa_required: 'true'
  post_install_message:
  rdoc_options: []
  require_paths:
@@ -43,14 +90,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 2.6.0
+       version: 2.7.0
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.2.33
+ rubygems_version: 3.3.7
  signing_key:
  specification_version: 4
  summary: Generative AI toolset for Ruby.
data/lib/language/google_palm.rb DELETED
@@ -1,9 +0,0 @@
- module GenAI
-   class Language
-     class GooglePalm
-       def initialize(token:, options: {})
-         @token = token
-       end
-     end
-   end
- end
data/lib/language/open_ai.rb DELETED
@@ -1,9 +0,0 @@
- module GenAI
-   class Language
-     class OpenAI
-       def initialize(token:, options: {})
-         @token = token
-       end
-     end
-   end
- end
data/lib/language.rb DELETED
@@ -1,48 +0,0 @@
- module GenAI
-   class Language
-     def initialize(provider, token, options: {})
-       @provider = provider
-       @token = token
-     end
-
-     def answer(question, context: {})
-       return 'Yes, it is.'
-     end
-
-     def completion(prompt, options: {})
-     end
-
-     def conversation(prompt, options: {})
-     end
-
-     def embedding(text)
-     end
-
-     def sentiment(text)
-     end
-
-     def keywords(text)
-     end
-
-     def summarization(text)
-     end
-
-     def translation(text, target:)
-     end
-
-     def correction(text)
-     end
-
-
-     def llm
-       case @provider
-       when :openai
-         GenAI::Language::OpenAI.new(token: @token)
-       when :google_palm
-         GenAI::Language::GooglePalm.new(token: @token)
-       else
-         raise UnsupportedConfiguration.new "Unknown LLM provider"
-       end
-     end
-   end
- end
data/lib/version.rb DELETED
@@ -1,3 +0,0 @@
- module GenAI
-   VERSION = "0.0.1"
- end