fastlane-plugin-translate_gpt 0.1.8.2 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b892379c42264401fe57500783b0b8ab2ad3b84ab81a5cd68e0e3c16250f74e1
- data.tar.gz: 35e9901ddf7a0435cebaefd55393e45800731f4a5eaba81b5c8d49e848cec070
+ metadata.gz: 853d06eecad9456f758e4dbdf4e766345d4a266d5d199608112c0947077dbee1
+ data.tar.gz: 7c3f0d6cda2bd029dcd0d5bb8313cc20c8bab024c6786571808fc9fd511af6c8
  SHA512:
- metadata.gz: 83d0ba21ea5b3df1ae37084401525638efc6ed04535b460a204e90d572208eace9717e7bcc0252806b7fa74a00ed9d8479d8bdb0e5ba7899453cfa523f17c067
- data.tar.gz: 03527622c722c86560a2d386053dabd1740087e57639c96cb622f3146fcd3b258254db95c420a970e2d9636310621c6011486832b14e4d59eca72e1fb14a7989
+ metadata.gz: 0f9ed56771f0e68f22c6325c2b6829036f38e31591d6d0c2b0b6dbc6ba1ff72665ed857daca1be73327f0e97b71e4c53b93d9449a9f0a81e9e1973da4b826a74
+ data.tar.gz: cb4812f57b1fa109a52534311bcd3d414d30f4c181ea3d927912ea0f810f86d9dc1a3a88b53f2afcf0aefd784e3d1201d66988518dae8437d1f12ac30229d3c3
data/README.md CHANGED
@@ -54,6 +54,8 @@ The following options are available for `translate-gpt`:
  | `target_file` | The path to the output file for the translated strings. | `GPT_TARGET_FILE` |
  | `context` | Common context for the translation | `GPT_COMMON_CONTEXT` |
  | `bunch_size` | Number of strings to translate in a single request.| `GPT_BUNCH_SIZE` |
+ | `max_input_tokens` | Maximum number of tokens in the input string. `bunch_size` will be ignored | `GPT_MAX_INPUT_TOKENS` |
+ | `mark_for_review` | If string has been translated by GPT, mark it for review | `GPT_MARK_FOR_REVIEW` |

  **Note:** __I advise using `bunch_size`. It will reduce the number of API requests and translations will be more accurate.__

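As a quick illustration of the two options added above, here is a hedged Fastfile sketch. The lane name, file paths, and values are illustrative only; the parameter keys and environment variables come from the config items in this release, and the action is assumed to be invoked as `translate_gpt` following fastlane's naming of `TranslateGptAction`.

```ruby
# Illustrative lane; keys mirror the ConfigItems added/changed in 0.1.10.
lane :translate_app_strings do
  translate_gpt(
    api_token: ENV["GPT_API_KEY"],          # or export GPT_API_KEY
    source_file: "Localizable.xcstrings",   # hypothetical path
    target_file: "Localizable.xcstrings",   # hypothetical path
    target_language: "de",
    max_input_tokens: 3000,                 # new: token-based batching; bunch_size is ignored when set
    mark_for_review: true                   # new: GPT translations are flagged for review
  )
end
```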
@@ -1,5 +1,4 @@
  require 'fastlane/action'
- require 'openai'
  require_relative '../helper/translate_gpt_helper'
  require 'loco_strings'

@@ -8,15 +7,18 @@ module Fastlane
  class TranslateGptAction < Action
  def self.run(params)
  helper = Helper::TranslateGptHelper.new(params)
- helper.prepare_hashes()
- bunch_size = params[:bunch_size]
+ helper.prepare_hashes
+ bunch_size = params[:bunch_size]
+ max_input_tokens = params[:max_input_tokens]
  helper.log_input(bunch_size)
- if bunch_size.nil? || bunch_size < 1
- helper.translate_strings()
- else
+ if !max_input_tokens.nil? && max_input_tokens > 0
+ helper.translate_bunch_with_tokenizer(max_input_tokens)
+ elsif bunch_size.nil? || bunch_size < 1
+ helper.translate_strings
+ else
  helper.translate_bunch_of_strings(bunch_size)
  end
- helper.write_output()
+ helper.write_output
  end

  #####################################################
@@ -31,89 +33,108 @@ module Fastlane
  [
  FastlaneCore::ConfigItem.new(
  key: :api_token,
- env_name: "GPT_API_KEY",
- description: "API token for ChatGPT",
+ env_name: 'GPT_API_KEY',
+ description: 'API token for ChatGPT',
  sensitive: true,
  code_gen_sensitive: true,
- default_value: ""
+ default_value: ''
  ),
  FastlaneCore::ConfigItem.new(
  key: :model_name,
- env_name: "GPT_MODEL_NAME",
- description: "Name of the ChatGPT model to use",
- default_value: "gpt-3.5-turbo"
+ env_name: 'GPT_MODEL_NAME',
+ description: 'Name of the ChatGPT model to use',
+ default_value: 'gpt-3.5-turbo'
  ),
  FastlaneCore::ConfigItem.new(
  key: :request_timeout,
- env_name: "GPT_REQUEST_TIMEOUT",
- description: "Timeout for the request in seconds",
+ env_name: 'GPT_REQUEST_TIMEOUT',
+ description: 'Timeout for the request in seconds',
  type: Integer,
  default_value: 30
  ),
  FastlaneCore::ConfigItem.new(
  key: :temperature,
- env_name: "GPT_TEMPERATURE",
- description: "What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic",
+ env_name: 'GPT_TEMPERATURE',
+ description: 'What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic',
  type: Float,
  optional: true,
  default_value: 0.5
  ),
  FastlaneCore::ConfigItem.new(
  key: :skip_translated,
- env_name: "GPT_SKIP_TRANSLATED",
- description: "Whether to skip strings that have already been translated",
+ env_name: 'GPT_SKIP_TRANSLATED',
+ description: 'Whether to skip strings that have already been translated',
  type: Boolean,
  optional: true,
  default_value: true
  ),
  FastlaneCore::ConfigItem.new(
  key: :source_language,
- env_name: "GPT_SOURCE_LANGUAGE",
- description: "Source language to translate from",
- default_value: "auto"
+ env_name: 'GPT_SOURCE_LANGUAGE',
+ description: 'Source language to translate from',
+ default_value: 'auto'
  ),
  FastlaneCore::ConfigItem.new(
  key: :target_language,
- env_name: "GPT_TARGET_LANGUAGE",
- description: "Target language to translate to",
- default_value: "en"
+ env_name: 'GPT_TARGET_LANGUAGE',
+ description: 'Target language to translate to',
+ default_value: 'en'
  ),
  FastlaneCore::ConfigItem.new(
  key: :source_file,
- env_name: "GPT_SOURCE_FILE",
- description: "The path to the Localizable.strings file to be translated",
+ env_name: 'GPT_SOURCE_FILE',
+ description: 'The path to the Localizable.strings file to be translated',
  verify_block: proc do |value|
  UI.user_error!("Invalid file path: #{value}") unless File.exist?(value)
  extension = File.extname(value)
- available_extensions = [".strings", ".xcstrings"]
- UI.user_error!("Translation file must have any of these extensions: #{available_extensions}") unless available_extensions.include? extension
+ available_extensions = ['.strings', '.xcstrings']
+ unless available_extensions.include? extension
+ UI.user_error!("Translation file must have any of these extensions: #{available_extensions}")
+ end
  end
  ),
  FastlaneCore::ConfigItem.new(
  key: :target_file,
- env_name: "GPT_TARGET_FILE",
- description: "Path to the translation file to update",
+ env_name: 'GPT_TARGET_FILE',
+ description: 'Path to the translation file to update',
  verify_block: proc do |value|
  UI.user_error!("Invalid file path: #{value}") unless File.exist?(value)
  extension = File.extname(value)
- available_extensions = [".strings", ".xcstrings"]
- UI.user_error!("Translation file must have any of these extensions: #{available_extensions}") unless available_extensions.include? extension
+ available_extensions = ['.strings', '.xcstrings']
+ unless available_extensions.include? extension
+ UI.user_error!("Translation file must have any of these extensions: #{available_extensions}")
+ end
  end
- ),
+ ),
  FastlaneCore::ConfigItem.new(
  key: :context,
- env_name: "GPT_COMMON_CONTEXT",
- description: "Common context for the translation",
+ env_name: 'GPT_COMMON_CONTEXT',
+ description: 'Common context for the translation',
  optional: true,
  type: String
- ),
+ ),
  FastlaneCore::ConfigItem.new(
  key: :bunch_size,
- env_name: "GPT_BUNCH_SIZE",
- description: "Number of strings to translate in a single request",
+ env_name: 'GPT_BUNCH_SIZE',
+ description: 'Number of strings to translate in a single request',
  optional: true,
  type: Integer
- ),
+ ),
+ FastlaneCore::ConfigItem.new(
+ key: :max_input_tokens,
+ env_name: 'GPT_MAX_INPUT_TOKENS',
+ description: 'Maximum number of tokens in the input request',
+ type: Integer,
+ optional: true
+ ),
+ FastlaneCore::ConfigItem.new(
+ key: :mark_for_review,
+ env_name: 'GPT_MARK_FOR_REVIEW',
+ description: 'If string has been translated by GPT, mark it for review',
+ type: Boolean,
+ optional: true,
+ default_value: false
+ ),
  ]
  end

@@ -128,15 +149,15 @@ module Fastlane
  def self.return_value
  # This action doesn't return any specific value, so we return nil
  nil
- end
+ end

  def self.authors
- ["ftp27"]
+ ['ftp27']
  end

  def self.is_supported?(platform)
- [:ios, :mac].include?(platform)
- end
+ %i[ios mac].include?(platform)
+ end
  end
  end
  end
@@ -1,6 +1,7 @@
  require 'fastlane_core/ui/ui'
  require 'loco_strings/parsers/xcstrings_file'
  require 'json'
+ require 'openai'
  # rubocop:disable all

  module Fastlane
@@ -106,32 +107,78 @@ module Fastlane
  number_of_bunches = (@translation_count / bunch_size.to_f).ceil
  @keys_associations = {}
  @to_translate.each_slice(bunch_size) do |bunch|
- prompt = prepare_bunch_prompt bunch
- if prompt.empty?
- UI.important "Empty prompt, skipping bunch"
+ begin
+ progress = (bunch_index / number_of_bunches.to_f * 100).round
+ request_bunch(bunch, progress)
+ bunch_index += 1
+ rescue "Empty prompt"
  next
  end
- max_retries = 10
- times_retried = 0
+ if bunch_index < number_of_bunches - 1 then wait end
+ end
+ end

- # translate the source string to the target language
- begin
- request_bunch_translate(bunch, prompt, bunch_index, number_of_bunches)
- bunch_index += 1
- rescue Net::ReadTimeout => error
- if times_retried < max_retries
- times_retried += 1
- UI.important "Failed to request translation, retry #{times_retried}/#{max_retries}"
- wait 1
- retry
- else
- UI.error "Can't translate the bunch: #{error}"
+ def translate_bunch_with_tokenizer(max_tokens)
+ string_index = 0
+ @keys_associations = {}
+ current_bunch = {}
+ @to_translate.each do |key, string|
+ string_index += 1
+ tmp_bunch = current_bunch.clone
+ tmp_bunch[key] = string
+
+ prompt = prepare_bunch_prompt tmp_bunch
+ tokens = OpenAI.rough_token_count(prompt)
+ if tokens > max_tokens
+ if current_bunch.empty?
+ string_index -= 1
+ UI.error "Can't translate #{key}: string is too long"
+ next
  end
+ prompt = prepare_bunch_prompt current_bunch
+ progress = (string_index / @translation_count.to_f * 100).round
+ request_bunch(bunch, progress)
+ current_bunch = {}
+ current_bunch[key] = string
+ if progress < 100 then wait end
+ else
+ current_bunch = tmp_bunch
  end
- if bunch_index < number_of_bunches - 1 then wait end
  end
+
+ if !current_bunch.empty?
+ prompt = prepare_bunch_prompt current_bunch
+ progress = (string_index / @translation_count.to_f * 100).round
+ request_bunch(current_bunch, progress)
+ end
+
  end

+ def request_bunch(bunch, progress)
+ UI.message "[#{progress}%] Translating #{bunch.size} strings..."
+ prompt = prepare_bunch_prompt bunch
+ if prompt.empty?
+ UI.important "Empty prompt, skipping bunch"
+ raise "Empty prompt"
+ end
+ max_retries = 10
+ times_retried = 0
+
+ # translate the source string to the target language
+ begin
+ request_bunch_translate(bunch, prompt, progress)
+ rescue Net::ReadTimeout => error
+ if times_retried < max_retries
+ times_retried += 1
+ UI.important "Failed to request translation, retry #{times_retried}/#{max_retries}"
+ wait 1
+ retry
+ else
+ UI.error "Can't translate the bunch: #{error}"
+ end
+ end
+ end
+
  # Prepare the prompt for the GPT API
  def prepare_prompt(string)
  prompt = "I want you to act as a translator for a mobile application strings. " + \
@@ -160,7 +207,7 @@ module Fastlane
 
  json_hash = []
  strings.each do |key, string|
- UI.message "Translating #{key} - #{string}"
+ # UI.message "Translating #{key} - #{string.value}"
  next if string.nil?

  string_hash = {}
@@ -228,7 +275,7 @@ module Fastlane
  end
  end

- def request_bunch_translate(strings, prompt, index, number_of_bunches)
+ def request_bunch_translate(strings, prompt, progress)
  response = @client.chat(
  parameters: {
  model: @params[:model_name],
@@ -242,7 +289,7 @@ module Fastlane
  error = response.dig("error", "message")

  #key_log = Colorizer::colorize(key, :blue)
- index_log = Colorizer::colorize("[#{index + 1}/#{number_of_bunches}]", :white)
+ index_log = Colorizer::colorize("[#{progress}%]", :white)
  if error
  UI.error "#{index_log} Error translating: #{error}"
  else
@@ -267,12 +314,12 @@ module Fastlane
  if translated_string.is_a? Hash
  strings = {}
  translated_string.each do |pl_key, value|
- UI.message "#{index_log} Translating #{real_key} > #{pl_key} - #{value}"
+ UI.message "#{index_log} #{real_key}: #{pl_key} - #{value}"
  strings[pl_key] = LocoStrings::LocoString.new(pl_key, value, context)
  end
  string = LocoStrings::LocoVariantions.new(real_key, strings, context)
  elsif translated_string && !translated_string.empty?
- UI.message "#{index_log} Translating #{real_key} - #{translated_string}"
+ UI.message "#{index_log} #{real_key}: #{translated_string}"
  string = LocoStrings::LocoString.new(real_key, translated_string, context)
  end
  @output_hash[real_key] = string
@@ -299,13 +346,14 @@ module Fastlane
  end
  file.write
  else
+ default_state = if :mark_for_review then "needs_review" else "translated" end
  @xcfile.update_file_path(@params[:target_file])
  @output_hash.each do |key, value|
  if value.is_a? LocoStrings::LocoString
- @xcfile.update(key, value.value, value.comment, "translated", @params[:target_language])
+ @xcfile.update(key, value.value, value.comment, default_state, @params[:target_language])
  elsif value.is_a? LocoStrings::LocoVariantions
  value.strings.each do |pl_key, variant|
- @xcfile.update_variation(key, pl_key, variant.value, variant.comment, "translated", @params[:target_language])
+ @xcfile.update_variation(key, pl_key, variant.value, variant.comment, default_state, @params[:target_language])
  end
  end
  end
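The `mark_for_review` option is meant to drive the review state written into the `.xcstrings` catalog. A hedged sketch of that mapping is below; note that the released hunk tests the literal symbol `:mark_for_review`, which is always truthy in Ruby, so guarding on the actual parameter, as shown here, is presumably the intent.

```ruby
# Sketch of the intended state selection; @params[:mark_for_review] comes from
# the :mark_for_review ConfigItem (GPT_MARK_FOR_REVIEW, default false).
default_state = @params[:mark_for_review] ? "needs_review" : "translated"

# Each translated entry is then written back with that state, e.g.:
# @xcfile.update(key, value.value, value.comment, default_state, @params[:target_language])
```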
@@ -1,5 +1,5 @@
  module Fastlane
  module TranslateGpt
- VERSION = "0.1.8.2"
+ VERSION = '0.1.10'
  end
  end
metadata CHANGED
@@ -1,43 +1,42 @@
  --- !ruby/object:Gem::Specification
  name: fastlane-plugin-translate_gpt
  version: !ruby/object:Gem::Version
- version: 0.1.8.2
+ version: 0.1.10
  platform: ruby
  authors:
  - Aleksei Cherepanov
- autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-08-12 00:00:00.000000000 Z
+ date: 2025-02-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: ruby-openai
+ name: loco_strings
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '3.7'
+ version: 0.1.5
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '3.7'
+ version: 0.1.5
  - !ruby/object:Gem::Dependency
- name: loco_strings
+ name: ruby-openai
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.1.4.1
+ version: 7.3.1
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.1.4.1
+ version: 7.3.1
  - !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
@@ -178,7 +177,6 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- description:
  email: ftp27host@gmail.com
  executables: []
  extensions: []
@@ -197,7 +195,6 @@ metadata:
  homepage_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
  source_code_uri: https://github.com/ftp27/fastlane-plugin-translate_gpt
  github_repo: https://github.com/ftp27/fastlane-plugin-translate_gpt
- post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -212,8 +209,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.19
- signing_key:
+ rubygems_version: 3.6.2
  specification_version: 4
  summary: This fastlane plugin provides an easy way to use the OpenAI GPT language
  model to translate strings in your iOS application.