i18n-ai 0.1.0 → 0.1.3
- checksums.yaml +4 -4
- data/.rubocop.yml +12 -0
- data/README.md +31 -3
- data/lib/i18n-ai.rb +2 -18
- data/lib/i18n_ai/clients/anthropic_client.rb +43 -0
- data/lib/i18n_ai/clients/base_client.rb +22 -0
- data/lib/i18n_ai/clients/open_ai_client.rb +45 -0
- data/lib/i18n_ai/configuration.rb +13 -0
- data/lib/i18n_ai/railtie.rb +121 -0
- data/lib/{i18n-ai → i18n_ai}/version.rb +1 -1
- data/lib/i18n_ai.rb +22 -0
- metadata +43 -6
- data/lib/i18n-ai/configuration.rb +0 -9
- data/lib/i18n-ai/railtie.rb +0 -75
- data/sig/i18n/ai.rbs +0 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 91c87df3579aa11f253f670664cbcf4721dc4145cb12740ab05f9f81a8fef913
+  data.tar.gz: 590b0626a6e5dee174d34ddd429154485a7345539bc398815851ca5c4bd50ea3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '08ecd1e198abe8f8363ca6bc227a36a00335f6cc52024b7c5f6ef2013e6a0205a860bdcbc102c34b7953b0708d7f02bc7379946deb7f0b2e702314ffd3f6a143'
+  data.tar.gz: 4e3b1a31c6112457ed53f274d32cf98c21bae53bffcc69d8b91e2ec79403790a4090fbb69a37bc4e7cdd2f30ad99a232777160e29c0944750906c4b1d477483b

data/.rubocop.yml
CHANGED
@@ -6,3 +6,15 @@ Style/StringLiterals:
 
 Style/StringLiteralsInInterpolation:
   EnforcedStyle: double_quotes
+
+Metrics/BlockLength:
+  Exclude:
+    - 'spec/**/*_spec.rb'
+
+Layout/LineLength:
+  Exclude:
+    - 'spec/**/*_spec.rb'
+
+Naming/FileName:
+  Exclude:
+    - 'lib/i18n-ai.rb'

data/README.md
CHANGED
@@ -14,19 +14,47 @@ gem "i18n-ai"
 
 And do `bundle install`.
 
-##
+## Configuration
 
-
+To use I18nAi, you need to set the appropriate environment variables for the AI service you wish to use.
 
-
+### OpenAI
+
+If you are using OpenAI, set `ENV["OPENAI_ACCESS_TOKEN"]`.
+
+To configure and enable other locales, create a file `config/initializers/i18n_ai.rb` and add the following:
 
 ```
 # config/initializers/i18n_ai.rb
 I18nAi.configure do |config|
+  config.ai_settings = {
+    provider: "openai",
+    model: "gpt-4o-mini",
+    access_token: ENV["OPENAI_ACCESS_TOKEN"]
+  }
   config.generate_locales = [:es, :it] # add your other supported locales to this array
 end
 ```
 
+### Anthropic's Claude
+
+If you prefer to use Anthropic's Claude, set `ENV["ANTHROPIC_ACCESS_TOKEN"]`.
+
+To configure and enable other locales, create a file `config/initializers/i18n_ai.rb` and add the following:
+
+```
+I18nAi.configure do |config|
+  config.ai_settings = {
+    provider: "anthropic",
+    model: "claude-3-haiku-20240307",
+    access_token: ENV["ANTHROPIC_ACCESS_TOKEN"]
+  }
+  config.generate_locales = [:es]
+end
+```
+
+## Usage
+
 On every page reload, the gem checks whether the `en.yml` file has changed and, if it has, automatically generates the configured locale files.
 
 The gem is set up to generate an `es.yml` file by default.

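For a concrete picture of what "generate the configured locale files" means, here is a hypothetical before/after pair (illustrative content, not shipped with the gem); per the translation prompt used by the clients further down, the keys stay in English and only the top-level language code and the values change:

```
# config/locales/en.yml (hypothetical source file)
en:
  greeting: "Hello"
  nav:
    home: "Home"

# config/locales/es.yml (what the gem is expected to write)
es:
  greeting: "Hola"
  nav:
    home: "Inicio"
```
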
data/lib/i18n-ai.rb
CHANGED
@@ -1,19 +1,3 @@
-
-require_relative "i18n-ai/railtie"
-require_relative "i18n-ai/configuration"
+# frozen_string_literal: true
 
-module I18nAi
-  class Error < StandardError; end
-
-  class << self
-    attr_writer :configuration
-
-    def configuration
-      @configuration ||= Configuration.new
-    end
-
-    def configure
-      yield(configuration)
-    end
-  end
-end
+require_relative "i18n_ai"

data/lib/i18n_ai/clients/anthropic_client.rb
ADDED
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require "anthropic"
+require_relative "base_client"
+
+module I18nAi
+  module Clients
+    # The AnthropicClient class is responsible for interacting with the Anthropic API
+    class AnthropicClient < BaseClient
+      def initialize
+        super
+        @client = Anthropic::Client.new(
+          access_token: @config[:access_token]
+        )
+      end
+
+      def chat(locale, text)
+        response = @client.messages(
+          parameters: {
+            model: @config[:model],
+            messages: [{ role: "user", content: content(locale, text) }]
+          }
+        )
+
+        parse_response(response)
+      rescue StandardError => e
+        handle_error(e)
+      end
+
+      private
+
+      def parse_response(response)
+        response.dig("content", 0, "text")
+      rescue TypeError, NoMethodError => e
+        handle_error(e)
+      end
+
+      def handle_error(error)
+        puts "Error: #{error.message}"
+      end
+    end
+  end
+end

data/lib/i18n_ai/clients/base_client.rb
ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module I18nAi
+  module Clients
+    # The BaseClient class serves as a base class for all AI client implementations
+    class BaseClient
+      def initialize
+        @config = I18nAi.configuration.ai_settings
+      end
+
+      def content(locale, text_to_translate)
+        # rubocop:disable Layout/LineLength
+        "Translate the following YAML content to #{locale.to_s.upcase} and make sure to retain the keys in english except the first key which is the 2 letter language code:\n\n#{text_to_translate}"
+        # rubocop:enable Layout/LineLength
+      end
+
+      def parse_response(response)
+        raise NotImplementedError, "Subclasses must implement this method"
+      end
+    end
+  end
+end

data/lib/i18n_ai/clients/open_ai_client.rb
ADDED
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require "openai"
+require_relative "base_client"
+
+module I18nAi
+  module Clients
+    # The OpenAiClient class is responsible for interacting with the OpenAI API
+    class OpenAiClient < BaseClient
+      def initialize
+        super
+        @client = OpenAI::Client.new(
+          access_token: @config[:access_token],
+          log_errors: true
+        )
+      end
+
+      def chat(locale, text)
+        response = @client.chat(
+          parameters: {
+            model: @config[:model],
+            messages: [{ role: "user", content: content(locale, text) }],
+            max_tokens: 5000
+          }
+        )
+
+        parse_response(response)
+      rescue StandardError => e
+        handle_error(e)
+      end
+
+      private
+
+      def parse_response(response)
+        response.dig("choices", 0, "message", "content")
+      rescue TypeError, NoMethodError => e
+        handle_error(e)
+      end
+
+      def handle_error(error)
+        puts "Error: #{error.message}"
+      end
+    end
+  end
+end

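For reference, `parse_response` digs into the hash shape that the ruby-openai gem returns for chat completions; a trimmed illustration of that shape (values invented):

```
# Illustrative response shape only; real payloads carry more fields (id, usage, etc.).
response = {
  "choices" => [
    { "message" => { "role" => "assistant", "content" => "translated YAML here" } }
  ]
}
response.dig("choices", 0, "message", "content") # => "translated YAML here"
```
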
data/lib/i18n_ai/railtie.rb
ADDED
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require "rails/railtie"
+require "digest"
+
+require_relative "clients/open_ai_client"
+require_relative "clients/anthropic_client"
+
+module I18nAi
+  # The Railtie class provides a way to integrate I18nAi into an application
+  class Railtie < Rails::Railtie
+    # Initialize the I18nAi middleware
+    class I18nAiMiddleware
+      def initialize(app)
+        @app = app
+        @client = configure_client
+        @last_checksum = nil
+      end
+
+      def call(env)
+        locales_file = locate_locales_file
+
+        if file_exists?(locales_file)
+          process_locales_file(locales_file)
+        else
+          log_file_not_found
+        end
+
+        @app.call(env)
+      end
+
+      private
+
+      def locate_locales_file
+        Rails.root.join("config", "locales", "en.yml")
+      end
+
+      def file_exists?(file)
+        File.exist?(file)
+      end
+
+      def process_locales_file(file)
+        current_checksum = calculate_checksum(file)
+        log_checksum(current_checksum)
+
+        first_load = @last_checksum.nil?
+        file_changed = current_checksum != @last_checksum
+        log_generate_status(first_load, file_changed)
+
+        return unless first_load || file_changed
+
+        @last_checksum = current_checksum
+        generate_translations(file)
+      end
+
+      def log_checksum(checksum)
+        puts "==> en.yml checksum: #{checksum}"
+      end
+
+      def log_generate_status(first_load, file_changed)
+        puts "==> en.yml generate: #{first_load || file_changed}"
+      end
+
+      def log_file_not_found
+        puts "en.yml file not found"
+      end
+
+      def configure_client
+        config = I18nAi.configuration.ai_settings
+        case config[:provider]
+        when "anthropic"
+          I18nAi::Clients::AnthropicClient.new
+        when "openai"
+          I18nAi::Clients::OpenAiClient.new
+        else
+          raise "Unknown AI provider: #{config[:provider]}"
+        end
+      end
+
+      def generate_translations(locales_file)
+        locales = load_locales(locales_file)
+        text_to_translate = locales.to_yaml
+        generate_locales = I18nAi.configuration.generate_locales
+
+        generate_locales.each do |locale|
+          response = translate_locales(locale, text_to_translate)
+          next unless response
+
+          translated_content = extract_translated_content(response)
+          save_translated_locales(locale, translated_content) if translated_content
+        end
+      end
+
+      def load_locales(locales_file)
+        YAML.load_file(locales_file)
+      end
+
+      def translate_locales(locale, text_to_translate)
+        @client.chat(locale, text_to_translate)
+      end
+
+      def extract_translated_content(response)
+        match_data = response.match(/```yaml(.*?)```/m)
+        match_data ? match_data[1].strip : nil
+      end
+
+      def save_translated_locales(locale, translated_content)
+        locales_file = Rails.root.join("config", "locales", "#{locale}.yml")
+        File.write(locales_file, translated_content)
+      end
+
+      def calculate_checksum(file_path)
+        Digest::SHA256.file(file_path).hexdigest
+      end
+    end
+
+    initializer "i18n_ai.configure_middleware", before: :build_middleware_stack do |app|
+      app.middleware.use I18nAiMiddleware
+    end
+  end
+end

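One detail worth noting in the railtie: `extract_translated_content` only keeps text wrapped in a fenced yaml block, so a model reply without that fence is silently discarded. A small sketch of the same regex at work on a hypothetical reply (the fence is built with string repetition here so no literal triple-backtick fence is nested in this page):

```
# Sketch of the extraction step with a made-up model reply.
FENCE = "`" * 3 # three backticks, as in a Markdown code fence
reply = "Here is the translation:\n\n#{FENCE}yaml\nes:\n  hello: Hola\n#{FENCE}\n"
match = reply.match(/#{FENCE}yaml(.*?)#{FENCE}/m)
match ? match[1].strip : nil # => "es:\n  hello: Hola"
```
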
data/lib/i18n_ai.rb
ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require_relative "i18n_ai/version"
+require_relative "i18n_ai/railtie"
+require_relative "i18n_ai/configuration"
+
+# The I18nAi module provides functionality for integrating AI-based translation services
+module I18nAi
+  class Error < StandardError; end
+
+  class << self
+    attr_writer :configuration
+
+    def configuration
+      @configuration ||= Configuration.new
+    end
+
+    def configure
+      yield(configuration)
+    end
+  end
+end

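The new `lib/i18n_ai/configuration.rb` (+13 lines per the file list) does not appear in this extract. Judging from the attributes read elsewhere (`ai_settings`, `generate_locales`) and the README's note that `es.yml` is generated by default, it is presumably close to the following sketch; this is an assumption, not the shipped file:

```
# Hypothetical reconstruction of lib/i18n_ai/configuration.rb
module I18nAi
  # Plain settings object populated from the Rails initializer
  class Configuration
    attr_accessor :ai_settings, :generate_locales

    def initialize
      @ai_settings = {} # provider/model/access_token; defaults here are a guess
      @generate_locales = [:es]
    end
  end
end
```
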
metadata
CHANGED
@@ -1,15 +1,49 @@
 --- !ruby/object:Gem::Specification
 name: i18n-ai
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.3
 platform: ruby
 authors:
 - William Estoque
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-08-
+date: 2024-08-06 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: railties
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 6.0.0
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '8'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 6.0.0
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '8'
+- !ruby/object:Gem::Dependency
+  name: anthropic
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: ruby-openai
   requirement: !ruby/object:Gem::Requirement
@@ -38,10 +72,13 @@ files:
 - README.md
 - Rakefile
 - lib/i18n-ai.rb
-- lib/i18n-ai/configuration.rb
-- lib/i18n-ai/railtie.rb
-- lib/i18n-ai/version.rb
-- sig/i18n/ai.rbs
+- lib/i18n_ai.rb
+- lib/i18n_ai/clients/anthropic_client.rb
+- lib/i18n_ai/clients/base_client.rb
+- lib/i18n_ai/clients/open_ai_client.rb
+- lib/i18n_ai/configuration.rb
+- lib/i18n_ai/railtie.rb
+- lib/i18n_ai/version.rb
 homepage: https://github.com/narralabs/i18n-ai
 licenses:
 - MIT

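The dependency blocks added above are the serialized form of runtime dependencies in the gemspec; expressed as gemspec calls they would read roughly like this (equivalent declarations inferred from the metadata diff, not the gemspec source):

```
# Inferred from the metadata diff above
spec.add_dependency "railties", ">= 6.0.0", "< 8"
spec.add_dependency "anthropic" # ">= 0"
spec.add_dependency "ruby-openai" # already present in 0.1.0; constraint not shown in this hunk
```
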
data/lib/i18n-ai/railtie.rb
DELETED
@@ -1,75 +0,0 @@
-require "rails/railtie"
-require "openai"
-require "digest"
-
-module I18nAi
-  class Railtie < Rails::Railtie
-    class I18nAiMiddleware
-      def initialize(app)
-        @app = app
-        @client = OpenAI::Client.new(
-          access_token: ENV.fetch("OPENAI_ACCESS_TOKEN"),
-          log_errors: true
-        )
-        @last_checksum = nil
-      end
-
-      def call(env)
-        locales_file = Rails.root.join("config", "locales", "en.yml")
-
-        if File.exist?(locales_file)
-          current_checksum = calculate_checksum(locales_file)
-          puts "==> en.yml checksum: #{current_checksum}"
-          first_load = @last_checksum.nil?
-          file_changed = current_checksum != @last_checksum
-          puts "==> en.yml generate: #{first_load || file_changed}"
-
-          if first_load || file_changed
-            @last_checksum = current_checksum
-            generate_translations(locales_file)
-          end
-        else
-          puts "en.yml file not found"
-        end
-
-        @app.call(env)
-      end
-
-      private
-
-      def generate_translations(locales_file)
-        locales = YAML.load_file(locales_file)
-        text_to_translate = locales.to_yaml
-        generate_locales = I18nAi.configuration.generate_locales
-
-        generate_locales.each do |locale|
-          # Make a request to OpenAI to translate the locales to the specified locale
-          response = @client.chat(
-            parameters: {
-              model: "gpt-4o-mini",
-              messages: [{ role: "user",
-                           content: "Translate the following YAML content to #{locale.to_s.upcase} and make sure to retain the keys in english except the first key which is the 2 letter language code:\n\n#{text_to_translate}" }],
-              max_tokens: 5000
-            }
-          )
-
-          translated_text = response["choices"][0]["message"]["content"]
-          match_data = translated_text.match(/```yaml(.*?)```/m)
-          str = match_data ? match_data[1].strip : nil
-
-          # Save the response to <locale>.yml
-          locales_file = Rails.root.join("config", "locales", "#{locale}.yml")
-          File.write(locales_file, str)
-        end
-      end
-
-      def calculate_checksum(file_path)
-        Digest::SHA256.file(file_path).hexdigest
-      end
-    end
-
-    initializer "i18n_ai.configure_middleware", before: :build_middleware_stack do |app|
-      app.middleware.use I18nAiMiddleware
-    end
-  end
-end