gooddata 2.3.1-java → 2.3.3-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.gdc-ii-config.yaml +1 -1
  3. data/.github/workflows/build.yml +10 -16
  4. data/.github/workflows/check-extended.yaml +245 -0
  5. data/.github/workflows/check.yaml +190 -0
  6. data/.github/workflows/gate.yaml +200 -0
  7. data/.github/workflows/pre-merge.yml +23 -54
  8. data/.travis.yml +0 -35
  9. data/CHANGELOG.md +6 -0
  10. data/Dockerfile +4 -10
  11. data/Dockerfile.jruby +14 -2
  12. data/DockerfileOldImage +88 -0
  13. data/SDK_VERSION +1 -1
  14. data/VERSION +1 -1
  15. data/bin/provision.sh +1 -1
  16. data/bin/release.sh +1 -1
  17. data/bin/rollout.sh +1 -1
  18. data/bin/user_filters.sh +1 -1
  19. data/bin/users.sh +1 -1
  20. data/ci/bigquery/pom.xml +1 -1
  21. data/ci/postgresql/pom.xml +1 -1
  22. data/ci/snowflake/pom.xml +11 -1
  23. data/gooddata.gemspec +27 -11
  24. data/lib/gooddata/cloud_resources/mssql/mssql_client.rb +2 -1
  25. data/lib/gooddata/cloud_resources/mysql/mysql_client.rb +2 -1
  26. data/lib/gooddata/cloud_resources/postgresql/postgresql_client.rb +2 -1
  27. data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +2 -1
  28. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +58 -3
  29. data/lib/gooddata/helpers/global_helpers.rb +1 -1
  30. data/lib/gooddata/lcm/actions/import_object_collections.rb +23 -2
  31. data/lib/gooddata/lcm/actions/synchronize_clients.rb +31 -3
  32. data/lib/gooddata/lcm/actions/synchronize_dataset_mappings.rb +6 -2
  33. data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb +3 -3
  34. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +5 -1
  35. data/lib/gooddata/lcm/lcm2.rb +4 -0
  36. data/lib/gooddata/models/domain.rb +16 -4
  37. data/lib/gooddata/models/metadata/label.rb +21 -4
  38. data/lib/gooddata/models/model.rb +1 -1
  39. data/lib/gooddata/models/project.rb +17 -2
  40. data/lib/gooddata/models/segment.rb +3 -2
  41. data/lib/gooddata/models/user_filters/user_filter_builder.rb +4 -3
  42. data/lib/gooddata/rest/client.rb +6 -1
  43. data/lib/gooddata/rest/connection.rb +20 -5
  44. metadata +185 -150
@@ -49,6 +49,7 @@ module GoodData
49
49
  raise('Missing connection info for MSSQL client')
50
50
  end
51
51
 
52
+ # When updating this driver class, also update the driver class used in the connect(..) method below
52
53
  Java.com.microsoft.sqlserver.jdbc.SQLServerDriver
53
54
  end
54
55
 
@@ -90,7 +91,7 @@ module GoodData
90
91
  prop.setProperty('userName', authentication['userName'])
91
92
  prop.setProperty('password', authentication['password'])
92
93
 
93
- @connection = java.sql.DriverManager.getConnection(connection_string, prop)
94
+ @connection = com.microsoft.sqlserver.jdbc.SQLServerDriver.new.connect(connection_string, prop)
94
95
  end
95
96
 
96
97
  def validate
@@ -49,6 +49,7 @@ module GoodData
49
49
  raise('Missing connection info for Mysql client')
50
50
  end
51
51
 
52
+ # When updating this driver class, also update the driver class used in the connect(..) method below
52
53
  Java.com.mysql.jdbc.Driver
53
54
  end
54
55
 
@@ -84,7 +85,7 @@ module GoodData
84
85
  prop = java.util.Properties.new
85
86
  prop.setProperty('user', @authentication['basic']['userName'])
86
87
  prop.setProperty('password', @authentication['basic']['password'])
87
- @connection = java.sql.DriverManager.getConnection(@url, prop)
88
+ @connection = com.mysql.jdbc.Driver.new.connect(@url, prop)
88
89
  @connection.set_auto_commit(false)
89
90
  end
90
91
 
@@ -49,6 +49,7 @@ module GoodData
49
49
  raise('Missing connection info for Postgres client')
50
50
  end
51
51
 
52
+ # When updating this driver class, also update the driver class used in the connect(..) method below
52
53
  Java.org.postgresql.Driver
53
54
  end
54
55
 
@@ -86,7 +87,7 @@ module GoodData
86
87
  prop.setProperty('password', @authentication['basic']['password'])
87
88
  prop.setProperty('schema', @schema)
88
89
 
89
- @connection = java.sql.DriverManager.getConnection(@url, prop)
90
+ @connection = org.postgresql.Driver.new.connect(@url, prop)
90
91
  statement = @connection.create_statement
91
92
  statement.execute("#{POSTGRES_SET_SCHEMA_COMMAND} #{@schema}")
92
93
  @connection.set_auto_commit(false)
@@ -38,6 +38,7 @@ module GoodData
38
38
  end
39
39
  @debug = options['debug'] == true || options['debug'] == 'true'
40
40
 
41
+ # When updating this driver class, also update the driver class used in the connect(..) method below
41
42
  Java.com.amazon.redshift.jdbc42.Driver
42
43
  end
43
44
 
@@ -83,7 +84,7 @@ module GoodData
83
84
  prop.setProperty('DbUser', @authentication['iam']['dbUser'])
84
85
  end
85
86
 
86
- @connection = java.sql.DriverManager.getConnection(full_url, prop)
87
+ @connection = com.amazon.redshift.jdbc42.Driver.new.connect(full_url, prop)
87
88
  end
88
89
 
89
90
  private
@@ -15,6 +15,14 @@ Dir.glob(base + 'drivers/*.jar').each do |file|
15
15
  require file unless file.start_with?('lcm-snowflake-driver')
16
16
  end
17
17
 
18
+ java_import 'java.io.StringReader'
19
+ java_import 'org.bouncycastle.openssl.PEMParser'
20
+ java_import 'org.bouncycastle.jce.provider.BouncyCastleProvider'
21
+ java_import 'org.bouncycastle.asn1.pkcs.PrivateKeyInfo'
22
+ java_import 'org.bouncycastle.pkcs.PKCS8EncryptedPrivateKeyInfo'
23
+ java_import 'org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8DecryptorProviderBuilder'
24
+ java_import 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter'
25
+
18
26
  module GoodData
19
27
  module CloudResources
20
28
  class SnowflakeClient < CloudResourceClient
@@ -41,6 +49,7 @@ module GoodData
41
49
 
42
50
  end
43
51
 
52
+ # When updating this driver class, also update the driver class used in the connect(..) method below
44
53
  Java.net.snowflake.client.jdbc.SnowflakeDriver
45
54
  end
46
55
 
@@ -74,13 +83,23 @@ module GoodData
74
83
  GoodData.logger.info "Setting up connection to Snowflake #{@url}"
75
84
 
76
85
  prop = java.util.Properties.new
77
- prop.setProperty('user', @authentication['basic']['userName'])
78
- prop.setProperty('password', @authentication['basic']['password'])
79
86
  prop.setProperty('schema', @schema)
80
87
  prop.setProperty('warehouse', @warehouse)
81
88
  prop.setProperty('db', @database)
82
89
 
83
- @connection = java.sql.DriverManager.getConnection(@url, prop)
90
+ if @authentication['keyPair']
91
+ prop.setProperty('user', @authentication['keyPair']['userName'])
92
+ private_key_str = build_private_key(@authentication['keyPair']['privateKey'], @authentication['keyPair']['passPhrase'])
93
+ prop.setProperty('private_key_base64', private_key_str)
94
+ else
95
+ prop.setProperty('user', @authentication['basic']['userName'])
96
+ prop.setProperty('password', @authentication['basic']['password'])
97
+ end
98
+
99
+ # Add the JDBC_QUERY_RESULT_FORMAT parameter to work around an unsafe-memory issue in the Snowflake JDBC driver
100
+ prop.setProperty('JDBC_QUERY_RESULT_FORMAT', 'JSON')
101
+
102
+ @connection = com.snowflake.client.jdbc.SnowflakeDriver.new.connect(@url, prop)
84
103
  end
85
104
 
86
105
  def build_url(url)
@@ -96,6 +115,42 @@ module GoodData
96
115
 
97
116
  url
98
117
  end
118
+
119
+ private
120
+
121
+ def build_private_key(private_key_string, pass_phrase)
122
+ java.security.Security.removeProvider("BC")
123
+ java.security.Security.addProvider(BouncyCastleProvider.new)
124
+
125
+ begin
126
+ pem_parser = PEMParser.new(StringReader.new(private_key_string))
127
+ pem_object = pem_parser.readObject
128
+
129
+ if pem_object.is_a?(PKCS8EncryptedPrivateKeyInfo)
130
+ builder = JceOpenSSLPKCS8DecryptorProviderBuilder.new
131
+ decryptor = builder.build(pass_phrase.to_java.to_char_array)
132
+ private_key_info = pem_object.decryptPrivateKeyInfo(decryptor)
133
+ elsif pem_object.is_a?(PrivateKeyInfo)
134
+ private_key_info = pem_object
135
+ end
136
+
137
+ ensure
138
+ pem_parser&.close
139
+ end
140
+
141
+ converter = JcaPEMKeyConverter.new
142
+ private_key = converter.getPrivateKey(private_key_info)
143
+ pem_str = convert_private_key(private_key)
144
+ java.util.Base64.getEncoder.encodeToString(pem_str.encode('UTF-8').bytes)
145
+ end
146
+
147
+ def convert_private_key(private_key)
148
+ pem = "-----BEGIN PRIVATE KEY-----\n"
149
+ encoder = java.util.Base64.getMimeEncoder(64, "\n".to_java_bytes)
150
+ base64 = encoder.encodeToString(private_key.getEncoded)
151
+ "#{pem}#{base64}\n-----END PRIVATE KEY-----"
152
+ end
153
+
99
154
  end
100
155
  end
101
156
  end
@@ -33,7 +33,7 @@ module GoodData
33
33
  include Hashie::Extensions::DeepMerge
34
34
  end
35
35
 
36
- set_const :GD_MAX_RETRY, (ENV['GD_MAX_RETRY'] && ENV['GD_MAX_RETRY'].to_i) || 12
36
+ set_const :GD_MAX_RETRY, (ENV['GD_MAX_RETRY'] && ENV['GD_MAX_RETRY'].to_i) || 14
37
37
  AES_256_CBC_CIPHER = 'aes-256-cbc'
38
38
 
39
39
  class << self
@@ -20,28 +20,35 @@ module GoodData
20
20
 
21
21
  description 'Synchronization Info'
22
22
  param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
23
+
24
+ description 'Number Of Threads'
25
+ param :number_of_threads, instance_of(Type::StringType), required: false, default: '10'
23
26
  end
24
27
 
25
28
  class << self
26
29
  def call(params)
27
30
  results = []
28
31
 
32
+ GoodData.logger.info 'Starting ImportObjectCollections action'
29
33
  client = params.gdc_gd_client
30
34
  development_client = params.development_client
35
+ number_of_threads = Integer(params.number_of_threads || '8')
31
36
 
32
- params.synchronize.peach do |info|
37
+ params.synchronize.peach(number_of_threads) do |info|
33
38
  from = info.from
34
39
  to_projects = info.to
35
40
  transfer_uris = info.transfer_uris
36
41
 
37
42
  from_project = development_client.projects(from) || fail("Invalid 'from' project specified - '#{from}'")
38
43
 
39
- to_projects.peach do |entry|
44
+ to_projects.peach(number_of_threads) do |entry|
40
45
  pid = entry[:pid]
41
46
  to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
42
47
 
43
48
  if transfer_uris.any?
49
+ logging_data(from, pid, transfer_uris, true)
44
50
  from_project.partial_md_export(transfer_uris, project: to_project)
51
+ logging_data(from, pid, transfer_uris, false)
45
52
  end
46
53
 
47
54
  results << {
@@ -54,6 +61,20 @@ module GoodData
54
61
 
55
62
  results
56
63
  end
64
+
65
+ private
66
+
67
+ def logging_data(from_project, to_project, transfer_uris, start_action)
68
+ if start_action
69
+ # Logging to execution log
70
+ GoodData.logger.info "Starting import objects, from_project: #{from_project}, to_project: #{to_project}"
71
+ # Logging to Splunk log
72
+ GoodData.gd_logger.info "Starting import objects, action=objects_import, from_project=#{from_project}, to_project=#{to_project}, transfer_uris=#{transfer_uris}"
73
+ else
74
+ GoodData.logger.info "Success import objects, from_project: #{from_project}, to_project: #{to_project}"
75
+ GoodData.gd_logger.info "Success import objects, action=objects_import, from_project=#{from_project}, to_project=#{to_project}"
76
+ end
77
+ end
57
78
  end
58
79
  end
59
80
  end
@@ -50,6 +50,9 @@ module GoodData
50
50
 
51
51
  description 'Sync failed list'
52
52
  param :sync_failed_list, instance_of(Type::HashType), required: false
53
+
54
+ description 'Synchronize clients time limit'
55
+ param :sync_clients_timeout, instance_of(Type::StringType), required: false
53
56
  end
54
57
 
55
58
  RESULT_HEADER = [
@@ -61,6 +64,7 @@ module GoodData
61
64
 
62
65
  class << self
63
66
  def call(params)
67
+ GoodData.logger.info 'Starting SynchronizeClients action'
64
68
  client = params.gdc_gd_client
65
69
 
66
70
  domain_name = params.organization || params.domain
@@ -69,6 +73,8 @@ module GoodData
69
73
  data_product = params.data_product
70
74
  domain_segments = domain.segments(:all, data_product)
71
75
  keep_only_previous_masters_count = Integer(params.keep_only_previous_masters_count || "-1")
76
+ sync_clients_options = {}
77
+ sync_clients_options = sync_clients_options.merge(:time_limit => Integer(params.sync_clients_timeout)) if params.sync_clients_timeout
72
78
 
73
79
  segments = params.segments.map do |seg|
74
80
  domain_segments.find do |s|
@@ -91,7 +97,9 @@ module GoodData
91
97
  segment.master_project = master
92
98
  segment.save
93
99
 
94
- res = segment.synchronize_clients
100
+ GoodData.logger.info "Starting synchronize clients for segment: '#{segment.segment_id}' with master workspace: '#{current_master[:master_project_id]}'"
101
+ res = segment.synchronize_clients(sync_clients_options)
102
+ GoodData.logger.info "Finish synchronize clients for segment: '#{segment.segment_id}'"
95
103
 
96
104
  sync_result = res.json['synchronizationResult']
97
105
  failed_count = sync_result['failedClients']['count']
@@ -170,7 +178,27 @@ module GoodData
170
178
 
171
179
  # Synchronize failure for all clients in segment
172
180
  if continue_on_error && success_count.zero? && failed_count.positive?
173
- segment_warning_message = "Failed to synchronize clients for #{segment.segment_id} segment. Details: #{sync_result['links']['details']}"
181
+ segment_warning_message = "Failed to synchronize all clients for #{segment.segment_id} segment. Details: #{sync_result['links']['details']}"
182
+ if sync_result['links'] && sync_result['links']['details'] # rubocop:disable Style/SafeNavigation
183
+ begin
184
+ client = params.gdc_gd_client
185
+ response = client.get sync_result['links']['details']
186
+ error_detail_result = response['synchronizationResultDetails']
187
+
188
+ if error_detail_result && error_detail_result['items'] # rubocop:disable Style/SafeNavigation
189
+ error_count = 1
190
+ error_detail_result['items'].each do |item|
191
+ break if error_count > 5
192
+
193
+ GoodData.logger.warn(error_message(item, segment))
194
+ error_count += 1
195
+ end
196
+ end
197
+ rescue StandardError => ex
198
+ GoodData.logger.warn "Failed to fetch result of synchronize clients. Error: #{ex.message}"
199
+ end
200
+ end
201
+
174
202
  add_failed_segment(segment.segment_id, segment_warning_message, short_name, params)
175
203
  return
176
204
  end
@@ -192,7 +220,7 @@ module GoodData
192
220
 
193
221
  def error_message(error_item, segment)
194
222
  error_client_id = error_item['id']
195
- error_message = "Failed to synchronize #{error_client_id} client in #{segment.segment_id} segment."
223
+ error_message = "Failed to synchronize #{error_client_id} client in #{segment.segment_id} segment"
196
224
  error_message = "#{error_message}. Detail: #{error_item['error']['message']}" if error_item['error'] && error_item['error']['message']
197
225
 
198
226
  error_message = "#{error_message}. Error items: #{error_item['error']['parameters']}" if error_item['error'] && error_item['error']['parameters']
@@ -33,6 +33,9 @@ module GoodData
33
33
 
34
34
  description 'Sync failed list'
35
35
  param :sync_failed_list, instance_of(Type::HashType), required: false
36
+
37
+ description 'Number Of Threads'
38
+ param :number_of_threads, instance_of(Type::StringType), required: false, default: '10'
36
39
  end
37
40
 
38
41
  RESULT_HEADER = %i[from to count status]
@@ -45,8 +48,9 @@ module GoodData
45
48
 
46
49
  client = params.gdc_gd_client
47
50
  development_client = params.development_client
51
+ number_of_threads = Integer(params.number_of_threads || '8')
48
52
 
49
- params.synchronize.peach do |info|
53
+ params.synchronize.peach(number_of_threads) do |info|
50
54
  from_project = info.from
51
55
  to_projects = info.to
52
56
 
@@ -60,7 +64,7 @@ module GoodData
60
64
  if dataset_mapping&.dig('datasetMappings', 'items').nil? || dataset_mapping['datasetMappings']['items'].empty?
61
65
  params.gdc_logger.info "Project: '#{from.title}', PID: '#{from.pid}' has no model mapping, skip synchronizing model mapping."
62
66
  else
63
- to_projects.peach do |to|
67
+ to_projects.peach(number_of_threads) do |to|
64
68
  pid = to[:pid]
65
69
  next if sync_failed_project(pid, params)
66
70
 
@@ -179,10 +179,10 @@ module GoodData
179
179
  hidden_params_for_this_client_schedule_name = hidden_params_for_this_client[schedule_name]
180
180
 
181
181
  schedule.update_params(schedule_additional_params) if schedule_additional_params
182
- schedule.update_params(**params_for_all_schedules_in_all_projects) if params_for_all_schedules_in_all_projects
183
- schedule.update_params(**params_for_all_projects_schedule_name) if params_for_all_projects_schedule_name
182
+ schedule.update_params(params_for_all_schedules_in_all_projects) if params_for_all_schedules_in_all_projects
183
+ schedule.update_params(params_for_all_projects_schedule_name) if params_for_all_projects_schedule_name
184
184
  schedule.update_params(params_for_all_schedules_in_this_client) if params_for_all_schedules_in_this_client
185
- schedule.update_params(**params_for_this_client_schedule_name) if params_for_this_client_schedule_name
185
+ schedule.update_params(params_for_this_client_schedule_name) if params_for_this_client_schedule_name
186
186
 
187
187
  schedule.update_hidden_params(schedule_additional_hidden_params) if schedule_additional_hidden_params
188
188
  schedule.update_hidden_params(hidden_params_for_all_schedules_in_all_projects) if hidden_params_for_all_schedules_in_all_projects
@@ -74,6 +74,9 @@ module GoodData
74
74
 
75
75
  description 'Makes the brick run without altering user filters'
76
76
  param :dry_run, instance_of(Type::StringType), required: false, default: false
77
+
78
+ description 'Number Of Threads'
79
+ param :number_of_threads, instance_of(Type::StringType), required: false, default: '10'
77
80
  end
78
81
 
79
82
  class << self
@@ -104,6 +107,7 @@ module GoodData
104
107
  symbolized_config = GoodData::Helpers.symbolize_keys(symbolized_config)
105
108
  symbolized_config[:labels] = symbolized_config[:labels].map { |l| GoodData::Helpers.symbolize_keys(l) }
106
109
  multiple_projects_column = params.multiple_projects_column
110
+ number_of_threads = Integer(params.number_of_threads || '10')
107
111
 
108
112
  mode = params.sync_mode
109
113
  unless MODES.include?(mode)
@@ -196,7 +200,7 @@ module GoodData
196
200
 
197
201
  unless run_params[:do_not_touch_filters_that_are_not_mentioned]
198
202
  to_be_deleted_clients = UserBricksHelper.non_working_clients(domain_clients, working_client_ids)
199
- to_be_deleted_clients.peach do |c|
203
+ to_be_deleted_clients.peach(number_of_threads) do |c|
200
204
  begin
201
205
  current_project = c.project
202
206
  users = users_by_project[c.client_id]
@@ -295,6 +295,10 @@ module GoodData
295
295
  end
296
296
 
297
297
  def perform(mode, params = {})
298
+ # The default is $pmap_default_thread_count = 20, set in the gooddata.rb file. Decrease the default
299
+ # thread count to 10 for LCM bricks only
300
+ $pmap_default_thread_count = 10 # rubocop:disable GlobalVars
301
+
298
302
  params = convert_params(params)
299
303
 
300
304
  GoodData.gd_logger.brick = mode
@@ -22,7 +22,15 @@ module GoodData
22
22
  'pt-PT' => 'Portuguese/Portugal',
23
23
  'fr-FR' => 'French',
24
24
  'de-DE' => 'German',
25
- 'ja-JP' => 'Japanese'
25
+ 'ja-JP' => 'Japanese',
26
+ 'it-IT' => 'Italian',
27
+ 'es-419' => 'Spanish/Latin America',
28
+ 'fr-CA' => 'French/Canada',
29
+ 'en-GB' => 'English/UK',
30
+ 'en-AU' => 'English/Australian',
31
+ 'fi-FI' => 'Finnish',
32
+ 'zh-Hant' => 'Traditional Chinese',
33
+ 'zh-HK' => 'Cantonese'
26
34
  }
27
35
 
28
36
  class << self
@@ -276,12 +284,16 @@ to new properties (email=#{user_data[:email]}, sso_provider=#{user_data[:sso_pro
276
284
  [{ type: :successful, :action => :user_changed_in_domain, user: updated_user }]
277
285
  end
278
286
  rescue RuntimeError => e
287
+ error_message = e.message
288
+ user.delete(:password)
279
289
  if !domain_user
280
- GoodData.logger.error("Failed to add user=#{user_login} to domain=#{default_domain_name}. Error: #{e.message}")
290
+ GoodData.logger.error("Failed to add user=#{user_login} to domain=#{default_domain_name}. Error: #{error_message}")
281
291
  else
282
- GoodData.logger.error("Failed to update user=#{user_login} in domain=#{default_domain_name}. Error: #{e.message}")
292
+ error_message = 'Invalid user data or update new password cannot be the same as old password' if error_message == '400 Bad Request'
293
+
294
+ GoodData.logger.error("Failed to update user=#{user_login} in domain=#{default_domain_name}. Error: #{error_message}")
283
295
  end
284
- [{ type: :failed, :user => user, message: e }]
296
+ [{ type: :failed, :user => user, message: error_message }]
285
297
  end
286
298
  end
287
299
  end
@@ -44,14 +44,31 @@ module GoodData
44
44
  # In the case filter a specific value, because the API /validElements only filter by partial match, we need to filter again at client side for exact match.
45
45
  # @return [Array] Results
46
46
  def get_valid_elements(*args)
47
+ results = {}
47
48
  if args && !args.empty? && args.first[:filter]
49
+ # Support paging when filtering by a specific value
48
50
  params = args.first
49
- params[:limit] = 100_000
50
- results, = valid_elements params
51
- results['validElements']['items'] = results['validElements']['items'].select do |i|
52
- i['element']['title'] == params[:filter]
51
+ all_valid_elements = []
52
+ offset = 0
53
+ paging_limit = 10_000
54
+
55
+ loop do
56
+ params[:offset] = offset
57
+ params[:limit] = paging_limit
58
+ results, = valid_elements params
59
+ all_valid_elements += results['validElements']['items'].select do |i|
60
+ i['element']['title'] == params[:filter]
61
+ end
62
+
63
+ if results['validElements']['items'].count < paging_limit
64
+ results['validElements']['items'] = all_valid_elements
65
+ break
66
+ else
67
+ offset += paging_limit
68
+ end
53
69
  end
54
70
  else
71
+ # In this case paging is handled by the caller, e.g. the values(...) method
55
72
  results, = valid_elements(*args)
56
73
  end
57
74
  results
@@ -166,7 +166,7 @@ module GoodData
166
166
 
167
167
  dir = Dir.mktmpdir
168
168
  begin
169
- Zip::File.open("#{dir}/upload.zip", Zip::File::CREATE) do |zip|
169
+ Zip::File.open("#{dir}/upload.zip", create: true) do |zip|
170
170
  # TODO: make sure schema columns match CSV column names
171
171
  zip.get_output_stream('upload_info.json') { |f| f.puts JSON.pretty_generate(manifest) }
172
172
 
@@ -1434,9 +1434,14 @@ module GoodData
1434
1434
  :crossDataCenterExport => '1'
1435
1435
  }
1436
1436
  }
1437
- result = client.post("#{md['maintenance']}/partialmdexport", export_payload)
1437
+ export_uri = "/gdc/md/#{pid}/maintenance/partialmdexport"
1438
+ GoodData.gd_logger.info("Project export action=objects_export, project_id=#{pid}, uri=#{export_uri}, export_status=start, export_objs=#{export_payload}") if GoodData.gd_logger
1439
+
1440
+ # The export API can take a long time to finish, so increase the timeout when calling it
1441
+ result = client.post(export_uri, export_payload, :timeout => 10)
1438
1442
  polling_url = result['partialMDArtifact']['status']['uri']
1439
1443
  token = result['partialMDArtifact']['token']
1444
+ GoodData.gd_logger.info("Project export action=objects_export, project_id=#{pid}, uri=#{polling_url}, export_status=polling") if GoodData.gd_logger
1440
1445
 
1441
1446
  polling_result = client.poll_on_response(polling_url, options) do |body|
1442
1447
  body['wTaskStatus'] && body['wTaskStatus']['status'] == 'RUNNING'
@@ -1445,6 +1450,9 @@ module GoodData
1445
1450
  messages = GoodData::Helpers.interpolate_error_messages(polling_result['wTaskStatus']['messages']).join(' ')
1446
1451
  fail ObjectsExportError, "Exporting objects failed with messages. #{messages}"
1447
1452
  end
1453
+
1454
+ GoodData.gd_logger.info("Project export action=objects_export, project_id=#{pid}, export_status=success") if GoodData.gd_logger
1455
+
1448
1456
  token
1449
1457
  end
1450
1458
 
@@ -1467,8 +1475,12 @@ module GoodData
1467
1475
  }
1468
1476
  }
1469
1477
 
1470
- result = client.post("#{md['maintenance']}/partialmdimport", import_payload)
1478
+ import_uri = "/gdc/md/#{pid}/maintenance/partialmdimport"
1479
+ GoodData.gd_logger.info("Project import action=objects_import, project_id=#{pid}, uri=#{import_uri}, import_status=start") if GoodData.gd_logger
1480
+
1481
+ result = client.post(import_uri, import_payload)
1471
1482
  polling_url = result['uri']
1483
+ GoodData.gd_logger.info("Project import action=objects_import, project_id=#{pid}, uri=#{polling_url}, import_status=polling") if GoodData.gd_logger
1472
1484
 
1473
1485
  polling_result = client.poll_on_response(polling_url, options) do |body|
1474
1486
  body['wTaskStatus'] && body['wTaskStatus']['status'] == 'RUNNING'
@@ -1478,6 +1490,9 @@ module GoodData
1478
1490
  messages = GoodData::Helpers.interpolate_error_messages(polling_result['wTaskStatus']['messages']).join(' ')
1479
1491
  fail ObjectsImportError, "Importing objects failed with messages. #{messages}"
1480
1492
  end
1493
+
1494
+ GoodData.gd_logger.info("Project import action=objects_import, project_id=#{pid}, uri=#{import_uri}, import_status=success") if GoodData.gd_logger
1495
+
1481
1496
  true
1482
1497
  end
1483
1498
 
@@ -193,13 +193,14 @@ module GoodData
193
193
 
194
194
  # Runs async process that walks through segments and provisions projects if necessary.
195
195
  #
196
+ # @param options [Hash] Options
196
197
  # @return [Array] Returns array of results
197
- def synchronize_clients
198
+ def synchronize_clients(options = {})
198
199
  sync_uri = SYNCHRONIZE_URI % [domain.obj_id, data_product.data_product_id, id]
199
200
  res = client.post sync_uri, nil
200
201
 
201
202
  # wait until the instance is created
202
- res = client.poll_on_response(res['asyncTask']['links']['poll'], :sleep_interval => 1) do |r|
203
+ res = client.poll_on_response(res['asyncTask']['links']['poll'], options.merge(:sleep_interval => 1)) do |r|
203
204
  r['synchronizationResult'].nil?
204
205
  end
205
206
 
@@ -51,7 +51,7 @@ module GoodData
51
51
  end
52
52
 
53
53
  def self.read_file(file, options = {})
54
- memo = Hash[]
54
+ memo = {}
55
55
  if row_based?(options)
56
56
  read_data_without_header(file, memo, options)
57
57
  else
@@ -180,7 +180,8 @@ module GoodData
180
180
 
181
181
  def self.create_lookups_cache(small_labels)
182
182
  small_labels.reduce({}) do |a, e|
183
- lookup = e.values(:limit => 1_000_000).reduce({}) do |a1, e1|
183
+ # The validElements API allows a maximum page size of 10,000 items
184
+ lookup = e.values(:limit => 10_000).reduce({}) do |a1, e1|
184
185
  a1[e1[:value]] = e1[:uri]
185
186
  a1
186
187
  end
@@ -513,7 +514,7 @@ module GoodData
513
514
  if to_create.empty?
514
515
  create_results = []
515
516
  else
516
- create_results = to_create.each_slice(100).flat_map do |batch|
517
+ create_results = to_create.each_slice(50).flat_map do |batch|
517
518
  batch.pmapcat do |related_uri, group|
518
519
  group.each(&:save)
519
520
  res = client.get("/gdc/md/#{project.pid}/userfilters?users=#{related_uri}")
@@ -25,7 +25,7 @@ module GoodData
25
25
  # Constants
26
26
  #################################
27
27
  DEFAULT_CONNECTION_IMPLEMENTATION = GoodData::Rest::Connection
28
- DEFAULT_SLEEP_INTERVAL = 10
28
+ DEFAULT_SLEEP_INTERVAL = 10 # 10 seconds
29
29
  DEFAULT_POLL_TIME_LIMIT = 5 * 60 * 60 # 5 hours
30
30
 
31
31
  #################################
@@ -353,6 +353,11 @@ module GoodData
353
353
  end
354
354
  sleep retry_time
355
355
  retry_time *= GoodData::Rest::Connection::RETRY_TIME_COEFFICIENT
356
+ # Polling waits for the API result, so there is no need to wait long between polling attempts
357
+ if retry_time > DEFAULT_SLEEP_INTERVAL
358
+ retry_time = DEFAULT_SLEEP_INTERVAL
359
+ end
360
+
356
361
  GoodData::Rest::Client.retryable(:tries => Helpers::GD_MAX_RETRY, :refresh_token => proc { connection.refresh_token }) do
357
362
  response = get(link, process: process)
358
363
  end