gooddata 2.1.19 → 2.3.0

Files changed (86)
  1. checksums.yaml +4 -4
  2. data/.gdc-ii-config.yaml +42 -1
  3. data/.github/workflows/build.yml +67 -0
  4. data/.github/workflows/pre-merge.yml +72 -0
  5. data/.pronto.yml +1 -0
  6. data/.rubocop.yml +2 -14
  7. data/CHANGELOG.md +47 -0
  8. data/Dockerfile +27 -14
  9. data/Dockerfile.jruby +5 -15
  10. data/Dockerfile.ruby +5 -7
  11. data/Gemfile +4 -2
  12. data/LICENSE +4409 -16
  13. data/README.md +6 -6
  14. data/Rakefile +1 -1
  15. data/SDK_VERSION +1 -1
  16. data/VERSION +1 -1
  17. data/bin/run_brick.rb +7 -0
  18. data/ci/mssql/pom.xml +62 -0
  19. data/ci/mysql/pom.xml +62 -0
  20. data/ci/redshift/pom.xml +4 -5
  21. data/docker-compose.lcm.yml +42 -4
  22. data/docker-compose.yml +42 -0
  23. data/gooddata.gemspec +21 -21
  24. data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
  25. data/lcm.rake +11 -8
  26. data/lib/gooddata/bricks/base_pipeline.rb +26 -0
  27. data/lib/gooddata/bricks/brick.rb +0 -1
  28. data/lib/gooddata/bricks/middleware/aws_middleware.rb +35 -9
  29. data/lib/gooddata/bricks/middleware/execution_result_middleware.rb +3 -3
  30. data/lib/gooddata/bricks/pipeline.rb +2 -14
  31. data/lib/gooddata/cloud_resources/blobstorage/blobstorage_client.rb +98 -0
  32. data/lib/gooddata/cloud_resources/mssql/drivers/.gitkeepme +0 -0
  33. data/lib/gooddata/cloud_resources/mssql/mssql_client.rb +122 -0
  34. data/lib/gooddata/cloud_resources/mysql/drivers/.gitkeepme +0 -0
  35. data/lib/gooddata/cloud_resources/mysql/mysql_client.rb +121 -0
  36. data/lib/gooddata/cloud_resources/postgresql/postgresql_client.rb +0 -1
  37. data/lib/gooddata/cloud_resources/redshift/drivers/.gitkeepme +0 -0
  38. data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +0 -2
  39. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +18 -1
  40. data/lib/gooddata/helpers/data_helper.rb +9 -4
  41. data/lib/gooddata/lcm/actions/base_action.rb +157 -0
  42. data/lib/gooddata/lcm/actions/collect_data_product.rb +2 -1
  43. data/lib/gooddata/lcm/actions/collect_meta.rb +3 -1
  44. data/lib/gooddata/lcm/actions/collect_projects_warning_status.rb +53 -0
  45. data/lib/gooddata/lcm/actions/collect_segment_clients.rb +14 -0
  46. data/lib/gooddata/lcm/actions/initialize_continue_on_error_option.rb +87 -0
  47. data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +31 -4
  48. data/lib/gooddata/lcm/actions/provision_clients.rb +34 -5
  49. data/lib/gooddata/lcm/actions/synchronize_cas.rb +24 -4
  50. data/lib/gooddata/lcm/actions/synchronize_clients.rb +112 -11
  51. data/lib/gooddata/lcm/actions/synchronize_dataset_mappings.rb +89 -0
  52. data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb +48 -11
  53. data/lib/gooddata/lcm/actions/synchronize_kd_dashboard_permission.rb +103 -0
  54. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +79 -23
  55. data/lib/gooddata/lcm/actions/synchronize_ldm_layout.rb +98 -0
  56. data/lib/gooddata/lcm/actions/synchronize_pp_dashboard_permission.rb +108 -0
  57. data/lib/gooddata/lcm/actions/synchronize_schedules.rb +31 -1
  58. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +26 -18
  59. data/lib/gooddata/lcm/actions/synchronize_user_groups.rb +30 -4
  60. data/lib/gooddata/lcm/actions/synchronize_users.rb +11 -10
  61. data/lib/gooddata/lcm/actions/update_metric_formats.rb +202 -0
  62. data/lib/gooddata/lcm/data/delete_from_lcm_release.sql.erb +5 -0
  63. data/lib/gooddata/lcm/exceptions/lcm_execution_warning.rb +15 -0
  64. data/lib/gooddata/lcm/helpers/check_helper.rb +19 -0
  65. data/lib/gooddata/lcm/helpers/release_table_helper.rb +42 -8
  66. data/lib/gooddata/lcm/lcm2.rb +50 -4
  67. data/lib/gooddata/lcm/user_bricks_helper.rb +9 -0
  68. data/lib/gooddata/mixins/inspector.rb +1 -1
  69. data/lib/gooddata/mixins/md_object_query.rb +1 -0
  70. data/lib/gooddata/models/data_source.rb +5 -1
  71. data/lib/gooddata/models/dataset_mapping.rb +36 -0
  72. data/lib/gooddata/models/ldm_layout.rb +38 -0
  73. data/lib/gooddata/models/metadata/label.rb +26 -27
  74. data/lib/gooddata/models/project.rb +230 -30
  75. data/lib/gooddata/models/project_creator.rb +83 -6
  76. data/lib/gooddata/models/schedule.rb +13 -1
  77. data/lib/gooddata/models/segment.rb +2 -1
  78. data/lib/gooddata/models/user_filters/user_filter_builder.rb +162 -68
  79. data/lib/gooddata/rest/connection.rb +5 -3
  80. data/lib/gooddata/rest/phmap.rb +2 -0
  81. data/lib/gooddata.rb +1 -0
  82. data/lib/gooddata_brick_base.rb +35 -0
  83. data/sonar-project.properties +6 -0
  84. metadata +100 -68
  85. data/lib/gooddata/bricks/middleware/bulk_salesforce_middleware.rb +0 -37
  86. data/lib/gooddata/cloud_resources/redshift/drivers/log4j.properties +0 -15
@@ -121,9 +121,10 @@ module GoodData
            users_brick_input: params.users_brick_users
          }
          all_clients = domain.clients(:all, data_product).to_a
-         GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size}")
+         GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size} ,")

          GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
+         results = []
          case mode
          when 'sync_project', 'sync_one_project_based_on_pid', 'sync_one_project_based_on_custom_id'
            if mode == 'sync_one_project_based_on_pid'
@@ -133,8 +134,10 @@ module GoodData
            end
            user_filters = user_filters.select { |f| f[:pid] == filter } if filter

-           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size}")
-           sync_user_filters(project, user_filters, run_params, symbolized_config)
+           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size} ,")
+           current_results = sync_user_filters(project, user_filters, run_params, symbolized_config)
+
+           results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
          when 'sync_multiple_projects_based_on_pid', 'sync_multiple_projects_based_on_custom_id'
            users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
            user_filters.group_by { |u| u[:pid] }.flat_map.pmap do |id, new_filters|
@@ -148,8 +151,10 @@ module GoodData
              current_project = client.projects(id)
            end

-             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size}")
-             sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size} ,")
+             current_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+
+             results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
            end
          when 'sync_domain_client_workspaces'
            domain_clients = all_clients
@@ -159,9 +164,9 @@ module GoodData
            end

            working_client_ids = []
+           semaphore = Mutex.new

            users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
-           results = []
            user_filters.group_by { |u| u[multiple_projects_column] }.flat_map.pmap do |client_id, new_filters|
              users = users_by_project[client_id]
              fail "Client id cannot be empty" if client_id.blank?
@@ -178,36 +183,39 @@ module GoodData
              current_project = c.project
              fail "Client #{client_id} does not have project." unless current_project

-             working_client_ids << client_id
+             semaphore.synchronize do
+               working_client_ids << client_id.to_s
+             end

-             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size}")
+             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size} ,")
              partial_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
-             results.concat(partial_results[:results])
+             results.concat(partial_results[:results]) unless partial_results.nil? || partial_results[:results].empty?
            end

-           unless run_params[:do_not_touch_filters_that_are_not_mentioned]
-             domain_clients.peach do |c|
-               next if working_client_ids.include?(c.client_id)
+           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, working_client_ids=#{working_client_ids.join(', ')} ,") if working_client_ids.size < 50

+           unless run_params[:do_not_touch_filters_that_are_not_mentioned]
+             to_be_deleted_clients = UserBricksHelper.non_working_clients(domain_clients, working_client_ids)
+             to_be_deleted_clients.peach do |c|
                begin
                  current_project = c.project
                  users = users_by_project[c.client_id]
                  params.gdc_logger.info "Delete all filters in project #{current_project.pid} of client #{c.client_id}"

-                 GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id}")
+                 GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id} ,")
                  current_results = sync_user_filters(current_project, [], run_params.merge(users_brick_input: users), symbolized_config)

-                 results.concat(current_results[:results])
+                 results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
                rescue StandardError => e
                  params.gdc_logger.error "Failed to clear filters of #{c.client_id} due to: #{e.inspect}"
                end
              end
            end
-
-           {
-             results: results
-           }
          end
+
+         {
+           results: results
+         }
        end

        def sync_user_filters(project, filters, params, filters_config)
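
In the `sync_domain_client_workspaces` branch above, the per-client blocks run in parallel via `pmap`, which is why this release wraps the `working_client_ids << client_id` append in a `Mutex` and only concatenates partial results when they are present. A minimal sketch of the locking pattern (the thread bodies and client ids below are illustrative, not taken from the brick):

```ruby
# Sketch of why the shared array is now mutated under a lock: the real brick
# appends from parallel pmap blocks; plain Threads stand in for pmap here.
working_client_ids = []
semaphore = Mutex.new

threads = %w[client_a client_b client_c].map do |client_id|
  Thread.new do
    # ... per-client user-filter synchronization would happen here ...
    semaphore.synchronize { working_client_ids << client_id.to_s }
  end
end
threads.each(&:join)

puts working_client_ids.sort.inspect # => ["client_a", "client_b", "client_c"]
```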
@@ -27,11 +27,22 @@ module GoodData

        description 'Additional Hidden Parameters'
        param :additional_hidden_params, instance_of(Type::HashType), required: false
+
+       description 'Abort on error'
+       param :abort_on_error, instance_of(Type::StringType), required: false
+
+       description 'Collect synced status'
+       param :collect_synced_status, instance_of(Type::BooleanType), required: false
+
+       description 'Sync failed list'
+       param :sync_failed_list, instance_of(Type::HashType), required: false
      end

      class << self
        def call(params)
          results = ThreadSafe::Array.new
+         collect_synced_status = collect_synced_status(params)
+         failed_projects = ThreadSafe::Array.new

          client = params.gdc_gd_client
          development_client = params.development_client
@@ -40,17 +51,32 @@ module GoodData
            from_project = info.from
            to_projects = info.to

-           from = development_client.projects(from_project) || fail("Invalid 'from' project specified - '#{from_project}'")
+           from = development_client.projects(from_project)
+           unless from
+             process_failed_project(from_project, "Invalid 'from' project specified - '#{from_project}'", failed_projects, collect_synced_status)
+             next
+           end

            to_projects.peach do |entry|
              pid = entry[:pid]
-             to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
+             next if sync_failed_project(pid, params)
+
+             to_project = client.projects(pid)
+             unless to_project
+               process_failed_project(pid, "Invalid 'to' project specified - '#{pid}'", failed_projects, collect_synced_status)
+               next
+             end

-             params.gdc_logger.info "Transferring User Groups, from project: '#{from.title}', PID: '#{from.pid}', to project: '#{to_project.title}', PID: '#{to_project.pid}'"
-             results += GoodData::Project.transfer_user_groups(from, to_project)
+             begin
+               params.gdc_logger.info "Transferring User Groups, from project: '#{from.title}', PID: '#{from.pid}', to project: '#{to_project.title}', PID: '#{to_project.pid}'"
+               results += GoodData::Project.transfer_user_groups(from, to_project)
+             rescue StandardError => err
+               process_failed_project(pid, err.message, failed_projects, collect_synced_status)
+             end
            end
          end

+         process_failed_projects(failed_projects, short_name, params) if collect_synced_status
          results.uniq
        end
      end
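
The rewritten `call` above no longer aborts on the first bad project: invalid `from`/`to` projects and transfer errors are routed through `process_failed_project`, and the accumulated failures are reported at the end via `process_failed_projects` when `collect_synced_status` is set. Those helpers live in the shared `BaseAction` (also extended in this release) and are not shown in this diff; the sketch below only illustrates the collect-or-raise idea, and its exact signature and behaviour are assumptions:

```ruby
# Illustrative sketch only: the real helper lives in BaseAction and is not shown here.
# It mirrors the control flow above: record the failure and keep going instead of raising.
failed_projects = []

def process_failed_project(pid, message, failed_projects, collect_synced_status)
  raise message unless collect_synced_status # without status collection, fail hard as before

  failed_projects << { project_id: pid, message: message } # otherwise record and continue
end

process_failed_project('proj_123', "Invalid 'to' project specified - 'proj_123'", failed_projects, true)
puts failed_projects.inspect
```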
@@ -190,7 +190,7 @@ module GoodData
            create_non_existing_user_groups: create_non_existing_user_groups,
            user_groups_cache: nil
          }
-         GoodData.gd_logger.info("Synchronizing in mode=#{mode}, data_rows=#{new_users.size}")
+         GoodData.gd_logger.info("Synchronizing in mode=#{mode}, data_rows=#{new_users.size} ,")

          GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
          results = case mode
@@ -202,7 +202,7 @@ module GoodData
            params.gdc_logger.info "#{user_ids.count - users.count} users were not found (or were deleted) in domain #{domain_name}" if user_ids.count > users.count
            params.gdc_logger.warn "Deleting #{users.count} users from domain #{domain_name}"

-           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, domain=#{domain_name}, data_rows=#{users.count}")
+           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, domain=#{domain_name}, data_rows=#{users.count} ,")
            users.map(&:delete)
          when 'sync_project'
            project.import_users(new_users, common_params)
@@ -211,7 +211,7 @@ module GoodData
            begin
              project = client.projects(project_id)

-             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project_id}, data_rows=#{users.count}")
+             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project_id}, data_rows=#{users.count} ,")
              project.import_users(users, common_params)
            rescue RestClient::ResourceNotFound
              fail "Project \"#{project_id}\" was not found. Please check your project ids in the source file"
@@ -224,7 +224,7 @@ module GoodData
          when 'sync_one_project_based_on_pid'
            filtered_users = new_users.select { |u| u[:pid] == project.pid }

-           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, data_rows=#{filtered_users.count}")
+           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, data_rows=#{filtered_users.count} ,")
            project.import_users(filtered_users, common_params)
          when 'sync_one_project_based_on_custom_id'
            filter_value = UserBricksHelper.resolve_client_id(domain, project, data_product)
@@ -245,7 +245,7 @@ module GoodData
            end

            GoodData.logger.info("Project #{project.pid} will receive #{filtered_users.count} from #{new_users.count} users")
-           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, filtered_users=#{filtered_users.count}, data_rows=#{new_users.count}")
+           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, filtered_users=#{filtered_users.count}, data_rows=#{new_users.count} ,")
            project.import_users(filtered_users, common_params)
          when 'sync_multiple_projects_based_on_custom_id'
            all_clients = domain.clients(:all, data_product).to_a
@@ -260,7 +260,7 @@ module GoodData

              GoodData.logger.info("Project #{project.pid} of client #{client_id} will receive #{users.count} users")

-             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{users.count}")
+             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{users.count} ,")
              project.import_users(users, common_params)
            end
          when 'sync_domain_client_workspaces'
@@ -294,7 +294,7 @@ module GoodData
              working_client_ids << client_id.to_s
              GoodData.logger.info("Project #{project.pid} of client #{client_id} will receive #{users.count} users")

-             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{users.count}")
+             GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{users.count} ,")
              project.import_users(users, common_params)
            end

@@ -319,17 +319,17 @@ module GoodData
              end
              GoodData.logger.info("Synchronizing all users in project #{project.pid} of client #{c.client_id}")

-             GoodData.gd_logger.info("Synchronizing all users in project_id=#{project.pid}, client_id=#{c.client_id}")
+             GoodData.gd_logger.info("Synchronizing all users in project_id=#{project.pid}, client_id=#{c.client_id} ,")
              res += project.import_users([], common_params)
            end
          end

          res
        when 'sync_domain_and_project'
-         GoodData.gd_logger.info("Create users in mode=#{mode}, data_rows=#{new_users.count}")
+         GoodData.gd_logger.info("Create users in mode=#{mode}, data_rows=#{new_users.count} ,")
          domain.create_users(new_users, ignore_failures: ignore_failures)

-         GoodData.gd_logger.info("Import users in mode=#{mode}, data_rows=#{new_users.count}")
+         GoodData.gd_logger.info("Import users in mode=#{mode}, data_rows=#{new_users.count} ,")
          project.import_users(new_users, common_params)
        end

@@ -390,6 +390,7 @@ module GoodData

        user_group = row[user_groups_column] || row[user_groups_column.to_sym]
        user_group = user_group.split(',').map(&:strip) if user_group
+       user_group = [] if row.headers.include?(user_groups_column) && !user_group

        ip_whitelist = row[ip_whitelist_column] || row[ip_whitelist_column.to_sym]
        ip_whitelist = ip_whitelist.split(',').map(&:strip) if ip_whitelist
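
The last hunk changes how an empty `user_groups` cell is read: previously a blank cell left `user_group` as `nil` (memberships untouched), whereas now a cell that is present in the header but empty yields `[]`, i.e. the user is removed from all groups. A small stdlib CSV illustration of the new behaviour (the column name `user_groups` is assumed):

```ruby
require 'csv'

user_groups_column = 'user_groups'
csv = CSV.parse(<<~DATA, headers: true)
  login,user_groups
  alice@example.com,"admins, analysts"
  bob@example.com,
DATA

csv.each do |row|
  user_group = row[user_groups_column]
  user_group = user_group.split(',').map(&:strip) if user_group
  # New in this release: a present-but-empty cell now means "no groups",
  # not "leave group membership unchanged".
  user_group = [] if row.headers.include?(user_groups_column) && !user_group
  puts "#{row['login']} => #{user_group.inspect}"
end
# alice@example.com => ["admins", "analysts"]
# bob@example.com => []
```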
@@ -0,0 +1,202 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+ # Copyright (c) 2010-2021 GoodData Corporation. All rights reserved.
+ # This source code is licensed under the BSD-style license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ require_relative 'base_action'
+
+ module GoodData
+   module LCM2
+     class UpdateMetricFormats < BaseAction
+       DESCRIPTION = 'Localize Metric Formats'
+
+       PARAMS = define_params(self) do
+         description 'Synchronization Info'
+         param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
+
+         description 'Client Used for Connecting to GD'
+         param :gdc_gd_client, instance_of(Type::GdClientType), required: true
+
+         description 'Organization Name'
+         param :organization, instance_of(Type::StringType), required: false
+
+         description 'DataProduct to manage'
+         param :data_product, instance_of(Type::GDDataProductType), required: false
+
+         description 'Logger'
+         param :gdc_logger, instance_of(Type::GdLogger), required: true
+
+         description 'ADS Client'
+         param :ads_client, instance_of(Type::AdsClientType), required: false
+
+         description 'Input Source'
+         param :input_source, instance_of(Type::HashType), required: false
+
+         description 'Localization query'
+         param :localization_query, instance_of(Type::StringType), required: false
+
+         description 'Abort on error'
+         param :abort_on_error, instance_of(Type::StringType), required: false
+
+         description 'Collect synced status'
+         param :collect_synced_status, instance_of(Type::BooleanType), required: false
+
+         description 'Sync failed list'
+         param :sync_failed_list, instance_of(Type::HashType), required: false
+       end
+
+       RESULT_HEADER = %i[action ok_clients error_clients]
+
+       class << self
+         def load_metric_data(params)
+           collect_synced_status = collect_synced_status(params)
+
+           if params&.dig(:input_source, :metric_format) && params[:input_source][:metric_format].present?
+             metric_input_source = validate_input_source(params[:input_source], collect_synced_status)
+             return nil unless metric_input_source
+           else
+             return nil
+           end
+
+           metric_data_source = GoodData::Helpers::DataSource.new(metric_input_source)
+           begin
+             temp_csv = without_check(PARAMS, params) do
+               File.open(metric_data_source.realize(params), 'r:UTF-8')
+             end
+           rescue StandardError => e
+             GoodData.logger.warn("Unable to get metric input source, skip updating metric formats. Error: #{e.message} - #{e}")
+             return nil
+           end
+
+           metrics_hash = GoodData::Helpers::Csv.read_as_hash temp_csv
+           return nil if metrics_hash.empty?
+
+           expected_keys = %w[tag client_id format]
+           unless expected_keys.map(&:to_sym).all? { |s| metrics_hash.first.key? s }
+             GoodData.logger.warn("The input metric data is incorrect, expecting the following fields: #{expected_keys}")
+             return nil
+           end
+           metrics_hash
+         end
+
+         def validate_input_source(input_source, continue_on_error)
+           type = input_source[:type] if input_source&.dig(:type)
+           metric_format = input_source[:metric_format]
+           if type.blank?
+             raise "Incorrect configuration: 'type' of 'input_source' is required" unless continue_on_error
+
+             return nil
+           end
+
+           modified_input_source = input_source
+           case type
+           when 'ads', 'redshift', 'snowflake', 'bigquery', 'postgresql', 'mssql', 'mysql'
+             if metric_format[:query].blank?
+               GoodData.logger.warn("The metric input_source '#{type}' is missing property 'query'")
+               return nil
+             end
+
+             modified_input_source[:query] = metric_format[:query]
+             return modified_input_source
+           when 's3'
+             if metric_format[:file].blank?
+               GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+               return nil
+             end
+
+             if modified_input_source.key?(:key)
+               modified_input_source[:key] = metric_format[:file]
+             else
+               modified_input_source[:file] = metric_format[:file]
+             end
+             return modified_input_source
+           when 'blobStorage'
+             if metric_format[:file].blank?
+               GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+               return nil
+             end
+
+             modified_input_source[:file] = metric_format[:file]
+             return modified_input_source
+           when 'staging'
+             if metric_format[:file].blank?
+               GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+               return nil
+             end
+
+             modified_input_source[:path] = metric_format[:file]
+             return modified_input_source
+           when 'web'
+             if metric_format[:url].blank?
+               GoodData.logger.warn("The metric input_source '#{type}' is missing property 'url'")
+               return nil
+             end
+
+             modified_input_source[:url] = metric_format[:url]
+             return modified_input_source
+           else
+             return nil
+           end
+         end
+
+         def get_clients_metrics(metric_data)
+           return {} if metric_data.nil?
+
+           metric_groups = {}
+           clients = metric_data.map { |row| row[:client_id] }.uniq
+           clients.each do |client|
+             next if client.blank?
+
+             formats = {}
+             metric_data.select { |row| row[:client_id] == client && row[:tag].present? && row[:format].present? }.each { |row| formats[row[:tag]] = row[:format] }
+             metric_groups[client.to_s] ||= formats
+           end
+           metric_groups
+         end
+
+         def call(params)
+           data = load_metric_data(params)
+           result = []
+           return result if data.nil?
+
+           metric_group = get_clients_metrics(data)
+           return result if metric_group.empty?
+
+           GoodData.logger.debug("Clients have metrics which will be modified: #{metric_group.keys}")
+           updated_clients = params.synchronize.map { |segment| segment.to.map { |client| client[:client_id] } }.flatten.uniq
+           GoodData.logger.debug("Updating clients: #{updated_clients}")
+           data_product = params.data_product
+           data_product_clients = data_product.clients
+           number_client_ok = 0
+           number_client_error = 0
+           collect_synced_status = collect_synced_status(params)
+           metric_group.each do |client_id, formats|
+             next if !updated_clients.include?(client_id) || (collect_synced_status && sync_failed_client(client_id, params))
+
+             client = data_product_clients.find { |c| c.id == client_id }
+             begin
+               GoodData.logger.info("Start updating metric format for client: '#{client_id}'")
+               metrics = client.project.metrics.to_a
+               formats.each do |tag, format|
+                 next if tag.blank? || format.blank?
+
+                 metrics_to_be_updated = metrics.select { |metric| metric.tags.include?(tag) }
+                 metrics_to_be_updated.each do |metric|
+                   metric.format = format
+                   metric.save
+                 end
+               end
+               number_client_ok += 1
+               GoodData.logger.info("Finished updating metric format for client: '#{client_id}'")
+             rescue StandardError => e
+               number_client_error += 1
+               GoodData.logger.warn("Failed to update metric format for client: '#{client_id}'. Error: #{e.message} - #{e}")
+             end
+           end
+           [{ :action => 'Update metric format', :ok_clients => number_client_ok, :error_clients => number_client_error }]
+         end
+       end
+     end
+   end
+ end
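
The new `UpdateMetricFormats` action expects tabular input with `tag`, `client_id`, and `format` columns, groups the formats per client, and then rewrites the format of every metric carrying a given tag in that client's project. The snippet below mirrors the grouping done by `get_clients_metrics` on made-up data; the real input would arrive through the `input_source`/`metric_format` configuration rather than an inline CSV:

```ruby
require 'csv'

# Hypothetical metric_format input; real data would come from ads/s3/blobStorage/staging/web.
rows = CSV.parse(<<~DATA, headers: true, header_converters: :symbol).map(&:to_h)
  tag,client_id,format
  revenue,client_us,"$#,##0.00"
  revenue,client_de,"#,##0.00 €"
  ratio,client_us,"#,##0.0%"
DATA

# Same shape as get_clients_metrics: client_id => { tag => format }
metric_groups = rows.group_by { |r| r[:client_id] }.transform_values do |client_rows|
  client_rows.to_h { |r| [r[:tag], r[:format]] }
end

puts metric_groups.inspect
# {"client_us"=>{"revenue"=>"$#,##0.00", "ratio"=>"#,##0.0%"},
#  "client_de"=>{"revenue"=>"#,##0.00 €"}}
```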
@@ -0,0 +1,5 @@
+ DELETE FROM "<%= table_name || 'LCM_RELEASE' %>"
+ WHERE
+   segment_id = '<%= segment_id %>'
+   AND master_project_id IN (<%= master_project_ids %>)
+ ;
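
This template is rendered by the new `delete_master_project_from_ads` helper (see the release-table hunk near the end of this diff), which supplies `table_name`, `segment_id`, and a pre-quoted, comma-joined `master_project_ids` string. A quick look at the rendered SQL using stdlib ERB; the gem itself renders through `GoodData::Helpers::ErbHelper`, so this is only an approximation:

```ruby
require 'erb'

template = <<~SQL
  DELETE FROM "<%= table_name || 'LCM_RELEASE' %>"
  WHERE
    segment_id = '<%= segment_id %>'
    AND master_project_id IN (<%= master_project_ids %>)
  ;
SQL

sql = ERB.new(template).result_with_hash(
  table_name: nil,                            # falls back to LCM_RELEASE
  segment_id: 'basic_segment',
  master_project_ids: %w[pid_a pid_b].map { |x| "'#{x}'" }.join(', ')
)
puts sql
# DELETE FROM "LCM_RELEASE"
# WHERE
#   segment_id = 'basic_segment'
#   AND master_project_id IN ('pid_a', 'pid_b')
# ;
```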
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+ # (C) 2019-2022 GoodData Corporation
+
+ module GoodData
+   class LcmExecutionWarning < RuntimeError
+     DEFAULT_MSG = 'Existing errors during lcm execution'
+
+     attr_reader :summary_error
+
+     def initialize(summary_error, message = DEFAULT_MSG)
+       super(message)
+       @summary_error = summary_error
+     end
+   end
+ end
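
`LcmExecutionWarning` lets a brick finish with a warning instead of a hard failure while still carrying the accumulated error details in `summary_error`. A minimal sketch of raising and inspecting it; the payload shape used here is an assumption, since this diff does not pin it down:

```ruby
# Assumes the gooddata gem (and the class above) is loaded.
# The summary_error payload shape below is illustrative only.
summary = { failed_projects: [{ project_id: 'proj_123', message: "Invalid 'to' project" }] }

begin
  raise GoodData::LcmExecutionWarning, summary
rescue GoodData::LcmExecutionWarning => e
  puts e.message                         # => "Existing errors during lcm execution"
  puts e.summary_error[:failed_projects] # structured details for reporting
end
```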
@@ -10,6 +10,9 @@ module GoodData
  module LCM2
    class Helpers
      class << self
+       ABORT_ON_ERROR_PARAM = 'abort_on_error'.to_sym
+       COLLECT_SYNCED_STATUS = 'collect_synced_status'.to_sym
+
        def check_params(specification, params)
          specification.keys.each do |param_name|
            value = params.send(param_name)
@@ -39,6 +42,22 @@ module GoodData
            end
          end
        end
+
+       def continue_on_error(params)
+         params.include?(ABORT_ON_ERROR_PARAM) && !to_bool(ABORT_ON_ERROR_PARAM, params[ABORT_ON_ERROR_PARAM])
+       end
+
+       def collect_synced_status(params)
+         params.include?(COLLECT_SYNCED_STATUS) && to_bool(COLLECT_SYNCED_STATUS, params[COLLECT_SYNCED_STATUS])
+       end
+
+       def to_bool(key, value)
+         return value if value.is_a?(TrueClass) || value.is_a?(FalseClass)
+         return true if value =~ /^(true|t|yes|y|1)$/i
+         return false if value == '' || value =~ /^(false|f|no|n|0)$/i
+
+         raise ArgumentError, "Invalid '#{value}' boolean value for '#{key}' parameter"
+       end
      end
    end
  end
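
These helpers define how the two new brick parameters are interpreted: `collect_synced_status` enables status collection only when explicitly truthy, while `continue_on_error` is true only when `abort_on_error` is present and false-like, so the default remains abort-on-error. The `to_bool` coercion accepts the usual string spellings; a standalone copy for illustration:

```ruby
# Behaviour of the coercion added above (copied here so it runs standalone).
def to_bool(key, value)
  return value if value.is_a?(TrueClass) || value.is_a?(FalseClass)
  return true if value =~ /^(true|t|yes|y|1)$/i
  return false if value == '' || value =~ /^(false|f|no|n|0)$/i

  raise ArgumentError, "Invalid '#{value}' boolean value for '#{key}' parameter"
end

puts to_bool('abort_on_error', 'false')      # => false  (errors tolerated, run continues)
puts to_bool('collect_synced_status', 'yes') # => true
puts to_bool('abort_on_error', '')           # => false
# to_bool('abort_on_error', 'maybe')         # would raise ArgumentError
```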
@@ -12,6 +12,21 @@ module GoodData

      class << self
        def latest_master_project_from_ads(release_table_name, ads_client, segment_id)
+         sorted = get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
+         sorted.last
+       end
+
+       def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+         file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+         sorted = get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
+         latest_master_project = sorted.last
+
+         version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
+         GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
+         latest_master_project
+       end
+
+       def get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
          replacements = {
            table_name: release_table_name || DEFAULT_TABLE_NAME,
            segment_id: segment_id
@@ -22,18 +37,27 @@ module GoodData

          res = ads_client.execute_select(query)
          sorted = res.sort_by { |row| row[:version] }
-         sorted.last
+         sorted
        end

-       def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+       def delete_master_project_from_ads(release_table_name, ads_client, segment_id, removal_master_project_ids)
+         replacements = {
+           table_name: release_table_name || DEFAULT_TABLE_NAME,
+           segment_id: segment_id,
+           master_project_ids: removal_master_project_ids.map { |x| "'#{x}'" } * ', '
+         }
+
+         path = File.expand_path('../data/delete_from_lcm_release.sql.erb', __dir__)
+         query = GoodData::Helpers::ErbHelper.template_file(path, replacements)
+
+         ads_client.execute(query)
+       end
+
+       def get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
          file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
          data = GoodData::Helpers::Csv.read_as_hash(file_path)
-         latest_master_project = data.sort_by { |master| master[:version] }
-                                     .reverse.first
-
-         version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
-         GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
-         latest_master_project
+         sorted = data.sort_by { |master| master[:version] }
+         sorted
        end

        def update_latest_master_to_nfs(domain_id, data_product_id, segment_id, master_pid, version)
@@ -46,6 +70,16 @@ module GoodData
          )
        end

+       def update_master_project_to_nfs(domain_id, data_product_id, segment_id, data)
+         file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+         FileUtils.mkpath(file_path.split('/')[0...-1].join('/'))
+         CSV.open(file_path, 'w', write_headers: true, headers: data.first.keys) do |csv|
+           data.each do |r|
+             csv << r.values
+           end
+         end
+       end
+
        def path_to_release_table_file(domain_id, data_prod_id, segment_id)
          nsf_directory = ENV['RELEASE_TABLE_NFS_DIRECTORY'] || DEFAULT_NFS_DIRECTORY
          [nsf_directory, domain_id, data_prod_id + '-' + segment_id + '.csv'].join('/')
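
Together, the new `get_master_project_list_from_*`, `delete_master_project_from_ads`, and `update_master_project_to_nfs` helpers let callers prune old master projects from the release table instead of only reading the latest entry. A hedged sketch of how a caller might keep only the newest rows in the NFS-backed table; only the helper names come from this diff, while the receiver (assumed to be `GoodData::LCM2::Helpers`, like the check helpers above), the placeholder identifiers, and the keep-last policy are assumptions:

```ruby
# Illustrative use of the helpers added above; keep_last and the pruning policy
# are assumptions, not something this release's code prescribes.
domain_id       = 'my-domain'
data_product_id = 'default'
segment_id      = 'basic-segment'
keep_last       = 3

rows = GoodData::LCM2::Helpers.get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
kept = rows.sort_by { |master| master[:version] }.last(keep_last)

# Rewrite the release table with only the retained rows (headers come from the first row).
GoodData::LCM2::Helpers.update_master_project_to_nfs(domain_id, data_product_id, segment_id, kept) unless kept.empty?
```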