fastlane-plugin-translate_gpt 0.1.9 → 0.1.10

This diff represents the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the changes between the two package versions.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 954eb550c534e78643e493d55a44aad05a28ea4545a7f965b393efeec7d8113b
- data.tar.gz: ca96f089100daaa43b0ed5942d03be22658a1ae0fecf7a24062eb7a4cb51dd11
+ metadata.gz: 853d06eecad9456f758e4dbdf4e766345d4a266d5d199608112c0947077dbee1
+ data.tar.gz: 7c3f0d6cda2bd029dcd0d5bb8313cc20c8bab024c6786571808fc9fd511af6c8
  SHA512:
- metadata.gz: bb6ed4b1b44fd5c97ce495524ea644911d55d0bec333611c53be68c9aba8becb8687c4d5bce0fa228bee2c48fd3bec227729664afff0e742073cd8de2f7a6423
- data.tar.gz: 1a1faac3623a8795115112629db8d0f35399bbee0c1b1295453f9236ef1c7b9cc7a4f26f855a5029f8d1f7fbd007e7a568c1779bacb8f84e262353ada3c28095
+ metadata.gz: 0f9ed56771f0e68f22c6325c2b6829036f38e31591d6d0c2b0b6dbc6ba1ff72665ed857daca1be73327f0e97b71e4c53b93d9449a9f0a81e9e1973da4b826a74
+ data.tar.gz: cb4812f57b1fa109a52534311bcd3d414d30f4c181ea3d927912ea0f810f86d9dc1a3a88b53f2afcf0aefd784e3d1201d66988518dae8437d1f12ac30229d3c3
data/README.md CHANGED
@@ -54,6 +54,7 @@ The following options are available for `translate-gpt`:
  | `target_file` | The path to the output file for the translated strings. | `GPT_TARGET_FILE` |
  | `context` | Common context for the translation | `GPT_COMMON_CONTEXT` |
  | `bunch_size` | Number of strings to translate in a single request. | `GPT_BUNCH_SIZE` |
+ | `max_input_tokens` | Maximum number of tokens in the input string. `bunch_size` will be ignored | `GPT_MAX_INPUT_TOKENS` |
  | `mark_for_review` | If string has been translated by GPT, mark it for review | `GPT_MARK_FOR_REVIEW` |

  **Note:** __I advise using `bunch_size`. It will reduce the number of API requests and translations will be more accurate.__
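For orientation, here is a minimal Fastfile sketch of how the new option could be wired up. Only `max_input_tokens`, `target_file`, `context`, `bunch_size`, and `model_name` appear in this diff; every other parameter name, path, and value below is a placeholder assumption, not the plugin's documented interface:

```ruby
# Hypothetical lane illustrating the new token-based batching option.
# Paths, language codes, and the api_token / source_language / target_language
# parameter names are assumptions for illustration only.
lane :translate_localizations do
  translate_gpt(
    api_token: ENV["OPENAI_API_KEY"],                       # assumed credential option
    model_name: "gpt-4o-mini",                              # model_name is referenced in the helper below
    source_language: "en",                                  # assumed option name
    target_language: "de",                                  # assumed option name
    target_file: "./Example/de.lproj/Localizable.strings",  # documented above
    context: "Strings for a note-taking iOS app",           # documented above
    max_input_tokens: 3000                                  # new in 0.1.10; takes precedence over bunch_size
  )
end
```

When `max_input_tokens` is set, the action takes the new `translate_bunch_with_tokenizer` path shown in the diff below instead of the fixed-size `bunch_size` batching.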
data/lib/fastlane/plugin/translate_gpt/actions/translate_gpt_action.rb CHANGED
@@ -1,5 +1,4 @@
  require 'fastlane/action'
- require 'openai'
  require_relative '../helper/translate_gpt_helper'
  require 'loco_strings'

@@ -10,8 +9,11 @@ module Fastlane
  helper = Helper::TranslateGptHelper.new(params)
  helper.prepare_hashes
  bunch_size = params[:bunch_size]
+ max_input_tokens = params[:max_input_tokens]
  helper.log_input(bunch_size)
- if bunch_size.nil? || bunch_size < 1
+ if !max_input_tokens.nil? && max_input_tokens > 0
+ helper.translate_bunch_with_tokenizer(max_input_tokens)
+ elsif bunch_size.nil? || bunch_size < 1
  helper.translate_strings
  else
  helper.translate_bunch_of_strings(bunch_size)
@@ -118,6 +120,13 @@ module Fastlane
  optional: true,
  type: Integer
  ),
+ FastlaneCore::ConfigItem.new(
+ key: :max_input_tokens,
+ env_name: 'GPT_MAX_INPUT_TOKENS',
+ description: 'Maximum number of tokens in the input request',
+ type: Integer,
+ optional: true
+ ),
  FastlaneCore::ConfigItem.new(
  key: :mark_for_review,
  env_name: 'GPT_MARK_FOR_REVIEW',
@@ -125,7 +134,7 @@ module Fastlane
  type: Boolean,
  optional: true,
  default_value: false
- )
+ ),
  ]
  end

data/lib/fastlane/plugin/translate_gpt/helper/translate_gpt_helper.rb CHANGED
@@ -1,6 +1,7 @@
  require 'fastlane_core/ui/ui'
  require 'loco_strings/parsers/xcstrings_file'
  require 'json'
+ require 'openai'
  # rubocop:disable all

  module Fastlane
@@ -106,32 +107,78 @@ module Fastlane
  number_of_bunches = (@translation_count / bunch_size.to_f).ceil
  @keys_associations = {}
  @to_translate.each_slice(bunch_size) do |bunch|
- prompt = prepare_bunch_prompt bunch
- if prompt.empty?
- UI.important "Empty prompt, skipping bunch"
+ begin
+ progress = (bunch_index / number_of_bunches.to_f * 100).round
+ request_bunch(bunch, progress)
+ bunch_index += 1
+ rescue "Empty prompt"
  next
  end
- max_retries = 10
- times_retried = 0
+ if bunch_index < number_of_bunches - 1 then wait end
+ end
+ end

- # translate the source string to the target language
- begin
- request_bunch_translate(bunch, prompt, bunch_index, number_of_bunches)
- bunch_index += 1
- rescue Net::ReadTimeout => error
- if times_retried < max_retries
- times_retried += 1
- UI.important "Failed to request translation, retry #{times_retried}/#{max_retries}"
- wait 1
- retry
- else
- UI.error "Can't translate the bunch: #{error}"
+ def translate_bunch_with_tokenizer(max_tokens)
+ string_index = 0
+ @keys_associations = {}
+ current_bunch = {}
+ @to_translate.each do |key, string|
+ string_index += 1
+ tmp_bunch = current_bunch.clone
+ tmp_bunch[key] = string
+
+ prompt = prepare_bunch_prompt tmp_bunch
+ tokens = OpenAI.rough_token_count(prompt)
+ if tokens > max_tokens
+ if current_bunch.empty?
+ string_index -= 1
+ UI.error "Can't translate #{key}: string is too long"
+ next
  end
+ prompt = prepare_bunch_prompt current_bunch
+ progress = (string_index / @translation_count.to_f * 100).round
+ request_bunch(bunch, progress)
+ current_bunch = {}
+ current_bunch[key] = string
+ if progress < 100 then wait end
+ else
+ current_bunch = tmp_bunch
  end
- if bunch_index < number_of_bunches - 1 then wait end
  end
+
+ if !current_bunch.empty?
+ prompt = prepare_bunch_prompt current_bunch
+ progress = (string_index / @translation_count.to_f * 100).round
+ request_bunch(current_bunch, progress)
+ end
+
  end

+ def request_bunch(bunch, progress)
+ UI.message "[#{progress}%] Translating #{bunch.size} strings..."
+ prompt = prepare_bunch_prompt bunch
+ if prompt.empty?
+ UI.important "Empty prompt, skipping bunch"
+ raise "Empty prompt"
+ end
+ max_retries = 10
+ times_retried = 0
+
+ # translate the source string to the target language
+ begin
+ request_bunch_translate(bunch, prompt, progress)
+ rescue Net::ReadTimeout => error
+ if times_retried < max_retries
+ times_retried += 1
+ UI.important "Failed to request translation, retry #{times_retried}/#{max_retries}"
+ wait 1
+ retry
+ else
+ UI.error "Can't translate the bunch: #{error}"
+ end
+ end
+ end
+
  # Prepare the prompt for the GPT API
  def prepare_prompt(string)
  prompt = "I want you to act as a translator for a mobile application strings. " + \
@@ -160,7 +207,7 @@ module Fastlane

  json_hash = []
  strings.each do |key, string|
- UI.message "Translating #{key} - #{string.value}"
+ # UI.message "Translating #{key} - #{string.value}"
  next if string.nil?

  string_hash = {}
@@ -228,7 +275,7 @@ module Fastlane
  end
  end

- def request_bunch_translate(strings, prompt, index, number_of_bunches)
+ def request_bunch_translate(strings, prompt, progress)
  response = @client.chat(
  parameters: {
  model: @params[:model_name],
@@ -242,7 +289,7 @@ module Fastlane
  error = response.dig("error", "message")

  #key_log = Colorizer::colorize(key, :blue)
- index_log = Colorizer::colorize("[#{index + 1}/#{number_of_bunches}]", :white)
+ index_log = Colorizer::colorize("[#{progress}%]", :white)
  if error
  UI.error "#{index_log} Error translating: #{error}"
  else
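To make the helper changes above easier to follow, here is a simplified, standalone sketch of the greedy token-budgeted batching that `translate_bunch_with_tokenizer` implements, using `OpenAI.rough_token_count` from ruby-openai 7.x (the dependency this release moves to). `build_prompt`, `send_batch`, and `translate_in_token_limited_batches` are hypothetical stand-ins for the plugin's `prepare_bunch_prompt` and `request_bunch`; this is not the plugin's own code:

```ruby
require 'openai'

# Hypothetical stand-ins for the plugin's prepare_bunch_prompt and request_bunch helpers.
def build_prompt(strings)
  "Translate the following strings:\n" + strings.map { |key, value| "#{key} = #{value}" }.join("\n")
end

def send_batch(strings)
  puts "sending a batch of #{strings.size} strings"
end

# Greedy packing: add strings until the rendered prompt would exceed max_tokens,
# then flush the batch and start a new one with the string that overflowed.
def translate_in_token_limited_batches(to_translate, max_tokens)
  current = {}
  to_translate.each do |key, value|
    candidate = current.merge(key => value)
    if OpenAI.rough_token_count(build_prompt(candidate)) > max_tokens
      if current.empty?
        # A single string already exceeds the budget; skip it, as the helper does.
        warn "#{key} is too long to fit into #{max_tokens} tokens"
        next
      end
      send_batch(current)
      current = { key => value }
    else
      current = candidate
    end
  end
  send_batch(current) unless current.empty?
end

translate_in_token_limited_batches({ "greeting" => "Hello", "farewell" => "Goodbye" }, 100)
```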
data/lib/fastlane/plugin/translate_gpt/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Fastlane
  module TranslateGpt
- VERSION = '0.1.9'
+ VERSION = '0.1.10'
  end
  end
metadata CHANGED
@@ -1,14 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: fastlane-plugin-translate_gpt
  version: !ruby/object:Gem::Version
- version: 0.1.9
+ version: 0.1.10
  platform: ruby
  authors:
  - Aleksei Cherepanov
- autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-01-27 00:00:00.000000000 Z
+ date: 2025-02-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: loco_strings
@@ -30,14 +29,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '3.7'
+ version: 7.3.1
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '3.7'
+ version: 7.3.1
  - !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
@@ -178,7 +177,6 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- description:
  email: ftp27host@gmail.com
  executables: []
  extensions: []
@@ -197,7 +195,6 @@ metadata:
  homepage_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
  source_code_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
  github_repo: https://github.com/ftp27/fastlane-plugin-translate_gpt
- post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -212,8 +209,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.19
- signing_key:
+ rubygems_version: 3.6.2
  specification_version: 4
  summary: This fastlane plugin provides an easy way to use the OpenAI GPT language
  model to translate strings in your iOS application.