fastlane-plugin-translate_gpt 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c370098458b2bc0d453de5101fa0a1f3273947cd41bc953bb94cf74f5ad58d6f
+  data.tar.gz: 96cccf7814b8796119d08340cca72089dea6c48a177df111b52bb2b1196ee756
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5d07f6a7c6a9351b633781d53ddd40e1a432a62bd270d9df8223d40d33b8ae5034123b430db52498a97c462baf5841f7d828df582a9e3cfd4767d83e064ef7ff
+  data.tar.gz: e5e90093831fa83665f98b302ca433f5e04e5765f74c6a0a0fd5972fcad3dbaf690657e0136acf7ab0f49f253117a5b4deb6ab23b5e209507871b1376d59de2f
data/README.md
CHANGED
@@ -53,6 +53,9 @@ The following options are available for `translate-gpt`:
 | `source_file` | The path to the `Localizable.strings` or `strings.xml` file to be translated. | `GPT_SOURCE_FILE` |
 | `target_file` | The path to the output file for the translated strings. | `GPT_TARGET_FILE` |
 | `context` | Common context for the translation | `GPT_COMMON_CONTEXT` |
+| `bunch_size` | Number of strings to translate in a single request. | `GPT_BUNCH_SIZE` |
+
+**Note:** __I advise using `bunch_size`. It will reduce the number of API requests and translations will be more accurate.__
 
 ## Providing context
 
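
To see the new option in use, here is a minimal Fastfile sketch. It assumes the action is invoked as `translate_gpt` and uses only the parameters documented above; every value shown is a placeholder, and each option can also be supplied through its environment variable (`GPT_SOURCE_FILE`, `GPT_TARGET_FILE`, `GPT_COMMON_CONTEXT`, `GPT_BUNCH_SIZE`).

```ruby
# Hypothetical lane showing the new bunch_size option.
# With bunch_size set, strings are sent to the API in groups instead of one request per string.
lane :translate_strings do
  translate_gpt(
    source_file: "Example/en.lproj/Localizable.strings", # placeholder path
    target_file: "Example/de.lproj/Localizable.strings", # placeholder path
    context: "a habit tracking app",                     # common context for the translation
    bunch_size: 20                                       # 20 strings per request
  )
end
```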
@@ -9,8 +9,13 @@ module Fastlane
       def self.run(params)
         helper = Helper::TranslateGptHelper.new(params)
         helper.prepare_hashes()
-
-        helper.
+        bunch_size = params[:bunch_size]
+        helper.log_input(bunch_size)
+        if bunch_size.nil? || bunch_size < 1
+          helper.translate_strings()
+        else
+          helper.translate_bunch_of_strings(bunch_size)
+        end
         helper.write_output()
       end
 
@@ -97,7 +102,14 @@ module Fastlane
             description: "Common context for the translation",
             optional: true,
             type: String
-          )
+          ),
+          FastlaneCore::ConfigItem.new(
+            key: :bunch_size,
+            env_name: "GPT_BUNCH_SIZE",
+            description: "Number of strings to translate in a single request",
+            optional: true,
+            type: Integer
+          ),
         ]
       end
 
@@ -1,5 +1,6 @@
 require 'fastlane_core/ui/ui'
 require 'loco_strings'
+require 'json'
 
 module Fastlane
   UI = FastlaneCore::UI unless Fastlane.const_defined?("UI")
@@ -23,13 +24,17 @@ module Fastlane
       end
 
       # Log information about the input strings
-      def log_input()
+      def log_input(bunch_size)
         @translation_count = @to_translate.size
         number_of_strings = Colorizer::colorize("#{@translation_count}", :blue)
         UI.message "Translating #{number_of_strings} strings..."
-        if
+        if bunch_size.nil? || bunch_size < 1
           estimated_string = Colorizer::colorize("#{@translation_count * @params[:request_timeout]}", :white)
           UI.message "Estimated time: #{estimated_string} seconds"
+        else
+          number_of_bunches = (@translation_count / bunch_size.to_f).ceil
+          estimated_string = Colorizer::colorize("#{number_of_bunches * @params[:request_timeout]}", :white)
+          UI.message "Estimated time: #{estimated_string} seconds"
         end
       end
 
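
For a concrete sense of the new estimate in `log_input`, here is a small sketch of the arithmetic with made-up numbers (120 strings, a bunch size of 20, a 30-second `request_timeout`); the formula is the one used above, `ceil(count / bunch_size) * request_timeout`.

```ruby
# Illustrative numbers only.
translation_count = 120
bunch_size        = 20
request_timeout   = 30

per_string_estimate = translation_count * request_timeout         # 3600 s without bunching
number_of_bunches   = (translation_count / bunch_size.to_f).ceil  # 6 bunches
bunched_estimate    = number_of_bunches * request_timeout         # 180 s with bunching
puts "#{per_string_estimate} s vs #{bunched_estimate} s"
```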
@@ -58,6 +63,32 @@ module Fastlane
         end
       end
 
+      def translate_bunch_of_strings(bunch_size)
+        bunch_index = 0
+        number_of_bunches = (@translation_count / bunch_size.to_f).ceil
+        @to_translate.each_slice(bunch_size) do |bunch|
+          prompt = prepare_bunch_prompt bunch
+          max_retries = 10
+          times_retried = 0
+
+          # translate the source string to the target language
+          begin
+            request_bunch_translate(bunch, prompt, bunch_index, number_of_bunches)
+            bunch_index += 1
+          rescue Net::ReadTimeout => error
+            if times_retried < max_retries
+              times_retried += 1
+              UI.important "Failed to request translation, retry #{times_retried}/#{max_retries}"
+              wait 1
+              retry
+            else
+              UI.error "Can't translate #{key}: #{error}"
+            end
+          end
+          if bunch_index < number_of_bunches - 1 then wait end
+        end
+      end
+
       # Prepare the prompt for the GPT API
       def prepare_prompt(string)
         prompt = "I want you to act as a translator for a mobile application strings. " + \
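
A note on the slicing: `@to_translate` appears to be a hash of keys to string objects (see `prepare_hashes` and the `|key, string|` destructuring in `prepare_bunch_prompt` below), so `each_slice(bunch_size)` yields arrays of `[key, value]` pairs. A standalone sketch with placeholder data:

```ruby
# Sketch only: Enumerable#each_slice on a Hash yields arrays of [key, value] pairs,
# which matches the |key, string| destructuring used in prepare_bunch_prompt.
to_translate = {
  "greeting" => "Hello",   # placeholders; the real hash holds LocoString objects
  "farewell" => "Goodbye",
  "cta_buy"  => "Buy now"
}

to_translate.each_slice(2) do |bunch|
  bunch.each { |key, value| puts "#{key}: #{value}" }
  puts "--- end of bunch ---"
end
# Prints two bunches: greeting/farewell, then cta_buy.
```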
@@ -75,6 +106,32 @@ module Fastlane
         return prompt
       end
 
+      def prepare_bunch_prompt(strings)
+        prompt = "I want you to act as a translator for a mobile application strings. " + \
+          "Try to keep length of the translated text. " + \
+          "You need to answer only with the translation and nothing else until I say to stop it."
+        if @params[:context] && !@params[:context].empty?
+          prompt += "This app is #{@params[:context]}. "
+        end
+        prompt += "Translate next text from #{@params[:source_language]} to #{@params[:target_language]}:\n"
+
+        json_hash = []
+        strings.each do |key, string|
+          string_hash = {}
+          context = string.comment
+          if context && !context.empty?
+            string_hash["context"] = context
+          end
+          string_hash["key"] = string.key
+          string_hash["string_to_translate"] = string.value
+          json_hash << string_hash
+        end
+        prompt += "'''\n"
+        prompt += json_hash.to_json
+        prompt += "\n'''"
+        return prompt
+      end
+
       # Request a translation from the GPT API
       def request_translate(key, string, prompt, index)
         response = @client.chat(
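
For clarity, this is roughly what the `'''`-delimited payload built by `prepare_bunch_prompt` looks like; the keys and strings below are invented for illustration, and a `context` field is only present when the source string has a non-empty comment.

```ruby
require 'json'

# Hypothetical bunch of two strings, mirroring the hash layout built above.
json_hash = [
  { "context" => "Shown on the paywall button", "key" => "cta_buy", "string_to_translate" => "Buy now" },
  { "key" => "greeting", "string_to_translate" => "Hello" }
]

puts "'''\n#{json_hash.to_json}\n'''"
# '''
# [{"context":"Shown on the paywall button","key":"cta_buy","string_to_translate":"Buy now"},{"key":"greeting","string_to_translate":"Hello"}]
# '''
```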
@@ -104,6 +161,48 @@ module Fastlane
         end
       end
 
+      def request_bunch_translate(strings, prompt, index, number_of_bunches)
+        response = @client.chat(
+          parameters: {
+            model: @params[:model_name],
+            messages: [
+              { role: "user", content: prompt }
+            ],
+            temperature: @params[:temperature],
+          }
+        )
+        # extract the translated string from the response
+        error = response.dig("error", "message")
+
+        #key_log = Colorizer::colorize(key, :blue)
+        index_log = Colorizer::colorize("[#{index + 1}/#{number_of_bunches}]", :white)
+        if error
+          UI.error "#{index_log} Error translating: #{error}"
+        else
+          target_string = response.dig("choices", 0, "message", "content")
+          json_string = target_string[/\[[^\[\]]*\]/m]
+          json_hash = JSON.parse(json_string)
+          keys_to_translate = json_hash.map { |string_hash| string_hash["key"] }
+          json_hash.each do |string_hash|
+            key = string_hash["key"]
+            context = string_hash["context"]
+            string_hash.delete("key")
+            string_hash.delete("context")
+            translated_string = string_hash.values.first
+            if key && !key.empty? && translated_string && !translated_string.empty?
+              UI.message "#{index_log} Translating #{key} - #{translated_string}"
+              string = LocoStrings::LocoString.new(key, translated_string, context)
+              @output_hash[key] = string
+              keys_to_translate.delete(key)
+            end
+          end
+
+          if keys_to_translate.length > 0
+            UI.important "#{index_log} Unable to translate #{keys_to_translate.join(", ")}"
+          end
+        end
+      end
+
       # Write the translated strings to the target file
       def write_output()
         number_of_strings = Colorizer::colorize("#{@output_hash.size}", :blue)
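
The response handling above hinges on `target_string[/\[[^\[\]]*\]/m]`, which extracts the first bracketed JSON array from the model's reply even when the model wraps it in extra prose. A standalone sketch with a made-up reply:

```ruby
require 'json'

# Made-up model reply: prose around the JSON array the prompt asked for.
reply = <<~TEXT
  Sure, here are the translations:
  [{"key":"cta_buy","translation":"Jetzt kaufen"},{"key":"greeting","translation":"Hallo"}]
TEXT

# Same extraction idea as request_bunch_translate: grab the first [...] span, then parse it.
json_string = reply[/\[[^\[\]]*\]/m]
JSON.parse(json_string).each do |string_hash|
  key        = string_hash.delete("key")
  translated = string_hash.values.first   # remaining value is the translated text
  puts "#{key} -> #{translated}"
end
# cta_buy -> Jetzt kaufen
# greeting -> Hallo
```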
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fastlane-plugin-translate_gpt
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 0.1.5
 platform: ruby
 authors:
 - Aleksei Cherepanov
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-07-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -193,7 +193,10 @@ files:
 homepage: https://github.com/ftp27/fastlane-plugin-translate_gpt
 licenses:
 - MIT
-metadata:
+metadata:
+  homepage_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
+  source_code_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
+  github_repo: https://github.com/ftp27/fastlane-plugin-translate_gpt
 post_install_message:
 rdoc_options: []
 require_paths: