gooddata 2.1.19 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +5 -5
  2. data/.gdc-ii-config.yaml +1 -1
  3. data/.github/workflows/build.yml +66 -0
  4. data/.github/workflows/pre-merge.yml +72 -0
  5. data/CHANGELOG.md +38 -0
  6. data/Dockerfile +21 -14
  7. data/Dockerfile.jruby +1 -11
  8. data/LICENSE +4409 -16
  9. data/README.md +1 -2
  10. data/SDK_VERSION +1 -1
  11. data/VERSION +1 -1
  12. data/ci/mssql/pom.xml +62 -0
  13. data/ci/mysql/pom.xml +57 -0
  14. data/ci/redshift/pom.xml +1 -1
  15. data/docker-compose.lcm.yml +0 -3
  16. data/gooddata.gemspec +2 -1
  17. data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
  18. data/lcm.rake +2 -8
  19. data/lib/gooddata/bricks/middleware/aws_middleware.rb +35 -9
  20. data/lib/gooddata/cloud_resources/blobstorage/blobstorage_client.rb +98 -0
  21. data/lib/gooddata/cloud_resources/mssql/drivers/.gitkeepme +0 -0
  22. data/lib/gooddata/cloud_resources/mssql/mssql_client.rb +122 -0
  23. data/lib/gooddata/cloud_resources/mysql/drivers/.gitkeepme +0 -0
  24. data/lib/gooddata/cloud_resources/mysql/mysql_client.rb +111 -0
  25. data/lib/gooddata/cloud_resources/postgresql/postgresql_client.rb +0 -1
  26. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +18 -1
  27. data/lib/gooddata/helpers/data_helper.rb +9 -4
  28. data/lib/gooddata/lcm/actions/collect_meta.rb +3 -1
  29. data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +3 -2
  30. data/lib/gooddata/lcm/actions/synchronize_clients.rb +56 -7
  31. data/lib/gooddata/lcm/actions/synchronize_dataset_mappings.rb +64 -0
  32. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +19 -8
  33. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +12 -9
  34. data/lib/gooddata/lcm/actions/update_metric_formats.rb +185 -0
  35. data/lib/gooddata/lcm/data/delete_from_lcm_release.sql.erb +5 -0
  36. data/lib/gooddata/lcm/helpers/release_table_helper.rb +42 -8
  37. data/lib/gooddata/lcm/lcm2.rb +5 -0
  38. data/lib/gooddata/mixins/md_object_query.rb +1 -0
  39. data/lib/gooddata/models/data_source.rb +5 -1
  40. data/lib/gooddata/models/dataset_mapping.rb +36 -0
  41. data/lib/gooddata/models/metadata/label.rb +26 -27
  42. data/lib/gooddata/models/project.rb +34 -9
  43. data/lib/gooddata/models/schedule.rb +13 -1
  44. data/lib/gooddata/models/user_filters/user_filter_builder.rb +58 -53
  45. data/lib/gooddata/rest/phmap.rb +1 -0
  46. metadata +45 -18
  47. data/lib/gooddata/bricks/middleware/bulk_salesforce_middleware.rb +0 -37
@@ -44,10 +44,14 @@ module GoodData
  realize_link
  when 's3'
  realize_s3(params)
- when 'redshift', 'snowflake', 'bigquery', 'postgresql'
+ when 'redshift', 'snowflake', 'bigquery', 'postgresql', 'mssql', 'mysql'
  raise GoodData::InvalidEnvError, "DataSource does not support type \"#{source}\" on the platform #{RUBY_PLATFORM}" unless RUBY_PLATFORM =~ /java/
  require_relative '../cloud_resources/cloud_resources'
  realize_cloud_resource(source, params)
+ when 'blobStorage'
+ require_relative '../cloud_resources/blobstorage/blobstorage_client'
+ blob_storage_client = GoodData::BlobStorageClient.new(params)
+ blob_storage_client.realize_blob(@options[:file], params)
  else
  raise "DataSource does not support type \"#{source}\""
  end
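
Note: the new 'blobStorage' branch hands the whole params hash to GoodData::BlobStorageClient and downloads @options[:file]. A minimal sketch of how such a source could be declared; only 'type' and 'file' appear in this hunk, and the blob storage connection details are expected to arrive via params and be resolved inside blobstorage_client.rb (not shown here):

    # Illustrative only -- credentials/connection info are assumed to be in `params`.
    source = GoodData::Helpers::DataSource.new(type: 'blobStorage', file: 'exports/users.csv')
    local_path = source.realize(params)   # dispatches to realize_blob above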
@@ -111,12 +115,13 @@ module GoodData
  end

  def realize_s3(params)
- s3_client = params['aws_client'] && params['aws_client']['s3_client']
+ s3_client = params['s3_client'] && params['s3_client']['client']
  raise 'AWS client not present. Perhaps S3Middleware is missing in the brick definition?' if !s3_client || !s3_client.respond_to?(:bucket)
  bucket_name = @options[:bucket]
- key = @options[:key]
+ key = @options[:key].present? ? @options[:key] : @options[:file]
  raise 'Key "bucket" is missing in S3 datasource' if bucket_name.blank?
- raise 'Key "key" is missing in S3 datasource' if key.blank?
+ raise 'Key "key" or "file" is missing in S3 datasource' if key.blank?
+
  GoodData.logger.info("Realizing download from S3. Bucket #{bucket_name}, object with key #{key}.")
  filename = Digest::SHA256.new.hexdigest(@options.to_json)
  bucket = s3_client.bucket(bucket_name)
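
Note: the S3 source now looks the client up under params['s3_client']['client'] (previously params['aws_client']['s3_client']) and accepts 'file' as a fallback for 'key'. A minimal sketch, assuming the aws-sdk-s3 gem, of params that would satisfy the respond_to?(:bucket) check; in a brick run this object is normally injected by the AWS middleware rather than built by hand:

    require 'aws-sdk-s3'

    params['s3_client'] = { 'client' => Aws::S3::Resource.new(region: 'us-east-1') }
    source = GoodData::Helpers::DataSource.new(type: 's3', bucket: 'my-bucket', file: 'exports/users.csv')
    local_path = source.realize(params)   # :key falls back to :file when absent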
@@ -53,7 +53,9 @@ module GoodData
  client: development_client
  )
  kpi_dashboards = MdObject.query('analyticalDashboard', MdObject, client: development_client, project: from_project)
- objects = old_dashboards.to_a + kpi_dashboards.to_a
+ kpi_dashboard_plugin = MdObject.query('dashboardPlugin', MdObject, client: development_client, project: from_project)
+ kpi_date_filter_config = MdObject.query('dateFilterConfig', MdObject, client: development_client, project: from_project)
+ objects = old_dashboards.to_a + kpi_dashboards.to_a + kpi_dashboard_plugin.to_a + kpi_date_filter_config.to_a
  else
  objects = GoodData::Dashboard.find_by_tag(
  production_tags,
@@ -50,6 +50,7 @@ module GoodData
  segment_info[:to].pmap do |entry|
  pid = entry[:pid]
  to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
+ GoodData.logger.info "Migrating date dimension, project: '#{to_project.title}', PID: #{pid}"
  to_blueprint = to_project.blueprint
  upgrade_datasets = get_upgrade_dates(latest_blueprint, to_blueprint)
  next if upgrade_datasets.empty?
@@ -71,9 +72,9 @@ module GoodData
  dest_dates = get_date_dimensions(dest_blueprint) if dest_blueprint
  src_dates = get_date_dimensions(src_blueprint) if src_blueprint

- return false if dest_dates.empty? || src_dates.empty?
-
  upgrade_datasets = []
+ return upgrade_datasets if dest_dates.empty? || src_dates.empty?
+
  dest_dates.each do |dest|
  src_dim = get_date_dimension(src_blueprint, dest[:id])
  next unless src_dim
@@ -33,6 +33,9 @@ module GoodData
  description 'ADS Client'
  param :ads_client, instance_of(Type::AdsClientType), required: false

+ description 'Keep number of old master workspace excluding the latest one'
+ param :keep_only_previous_masters_count, instance_of(Type::StringType), required: false, default: '-1'
+
  description 'Additional Hidden Parameters'
  param :additional_hidden_params, instance_of(Type::HashType), required: false
  end
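
Note: keep_only_previous_masters_count arrives as a string (Type::StringType) and defaults to '-1', which disables the cleanup. A hypothetical brick configuration keeping the current master plus three previous ones per segment; only the key and its string value come from this diff, the surrounding parameters are assumed:

    params = {
      'keep_only_previous_masters_count' => '3'
    }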
@@ -53,6 +56,7 @@ module GoodData
  domain = client.domain(domain_name) || fail("Invalid domain name specified - #{domain_name}")
  data_product = params.data_product
  domain_segments = domain.segments(:all, data_product)
+ keep_only_previous_masters_count = Integer(params.keep_only_previous_masters_count || "-1")

  segments = params.segments.map do |seg|
  domain_segments.find do |s|
@@ -62,18 +66,14 @@ module GoodData

  results = segments.map do |segment|
  if params.ads_client
- current_master = GoodData::LCM2::Helpers.latest_master_project_from_ads(
- params.release_table_name,
- params.ads_client,
- segment.segment_id
- )
+ master_projects = GoodData::LCM2::Helpers.get_master_project_list_from_ads(params.release_table_name, params.ads_client, segment.segment_id)
  else
- current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product.data_product_id, segment.segment_id)
+ master_projects = GoodData::LCM2::Helpers.get_master_project_list_from_nfs(domain_name, data_product.data_product_id, segment.segment_id)
  end

+ current_master = master_projects.last
  # TODO: Check res.first.nil? || res.first[:master_project_id].nil?
  master = client.projects(current_master[:master_project_id])
-
  segment.master_project = master
  segment.save

@@ -87,6 +87,19 @@ module GoodData
  "Details: #{sync_result['links']['details']}")
  end

+ if keep_only_previous_masters_count >= 0
+ number_of_deleted_projects = master_projects.count - (keep_only_previous_masters_count + 1)
+
+ if number_of_deleted_projects.positive?
+ begin
+ removal_master_project_ids = remove_multiple_workspace(params, segment.segment_id, master_projects, number_of_deleted_projects)
+ remove_old_workspaces_from_release_table(params, domain_name, data_product.data_product_id, segment.segment_id, master_projects, removal_master_project_ids)
+ rescue Exception => e # rubocop:disable RescueException
+ GoodData.logger.error "Problem occurs when removing old master workspace, reason: #{e.message}"
+ end
+ end
+ end
+
  {
  segment: segment.id,
  master_pid: master.pid,
@@ -98,6 +111,42 @@ module GoodData
  # Return results
  results
  end
+
+ def remove_multiple_workspace(params, segment_id, master_projects, number_of_deleted_projects)
+ removal_master_project_ids = []
+ need_to_delete_projects = master_projects.take(number_of_deleted_projects)
+
+ need_to_delete_projects.each do |project_wrapper|
+ master_project_id = project_wrapper[:master_project_id]
+ next if master_project_id.to_s.empty?
+
+ begin
+ project = params.gdc_gd_client.projects(master_project_id)
+ if project && !%w[deleted archived].include?(project.state.to_s)
+ GoodData.logger.info "Segment #{segment_id}: Deleting old master workspace, project: '#{project.title}', PID: (#{project.pid})."
+ project.delete
+ end
+ removal_master_project_ids << master_project_id
+ master_projects.delete_if { |p| p[:master_project_id] == master_project_id }
+ rescue Exception => ex # rubocop:disable RescueException
+ GoodData.logger.error "Unable to remove master workspace: '#{master_project_id}', Error: #{ex.message}"
+ end
+ end
+ removal_master_project_ids
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def remove_old_workspaces_from_release_table(params, domain_id, data_product_id, segment_id, master_projects, removal_master_project_ids)
+ unless removal_master_project_ids.empty?
+ if params.ads_client
+ GoodData::LCM2::Helpers.delete_master_project_from_ads(params.release_table_name, params.ads_client, segment_id, removal_master_project_ids)
+ else
+ data = master_projects.sort_by { |master| master[:version] }
+ GoodData::LCM2::Helpers.update_master_project_to_nfs(domain_id, data_product_id, segment_id, data)
+ end
+ end
+ end
+ # rubocop:enable Metrics/ParameterLists
  end
  end
  end
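
Note: master_projects is returned sorted by version ascending (see release_table_helper.rb below), so take(number_of_deleted_projects) removes the oldest entries first. As a worked example, with 10 recorded masters and keep_only_previous_masters_count = 3, number_of_deleted_projects = 10 - (3 + 1) = 6: the six oldest masters are deleted and the latest four (current plus three previous) are kept.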
@@ -0,0 +1,64 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+ #
+ # Copyright (c) 2010-2021 GoodData Corporation. All rights reserved.
+ # This source code is licensed under the BSD-style license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ require_relative 'base_action'
+
+ module GoodData
+ module LCM2
+ class SynchronizeDataSetMapping < BaseAction
+ DESCRIPTION = 'Synchronize Dataset Mappings'
+
+ PARAMS = define_params(self) do
+ description 'Client Used for Connecting to GD'
+ param :gdc_gd_client, instance_of(Type::GdClientType), required: true
+
+ description 'Client used to connecting to development domain'
+ param :development_client, instance_of(Type::GdClientType), required: true
+
+ description 'Synchronization Info'
+ param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
+
+ description 'Logger'
+ param :gdc_logger, instance_of(Type::GdLogger), required: true
+ end
+
+ RESULT_HEADER = %i[from to count status]
+
+ class << self
+ def call(params)
+ results = []
+
+ client = params.gdc_gd_client
+ development_client = params.development_client
+
+ params.synchronize.peach do |info|
+ from_project = info.from
+ to_projects = info.to
+
+ from = development_client.projects(from_project) || fail("Invalid 'from' project specified - '#{from_project}'")
+ dataset_mapping = from.dataset_mapping
+ if dataset_mapping&.dig('datasetMappings', 'items').nil? || dataset_mapping['datasetMappings']['items'].empty?
+ params.gdc_logger.info "Project: '#{from.title}', PID: '#{from.pid}' has no model mapping, skip synchronizing model mapping."
+ else
+ to_projects.peach do |to|
+ pid = to[:pid]
+ to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
+
+ params.gdc_logger.info "Transferring model mapping, from project: '#{from.title}', PID: '#{from.pid}', to project: '#{to_project.title}', PID: '#{to_project.pid}'"
+ res = to_project.update_dataset_mapping(dataset_mapping)
+ res[:from] = from.pid
+ results << res
+ end
+ end
+ end
+ # Return results
+ results.flatten
+ end
+ end
+ end
+ end
+ end
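
Note: the action only inspects the 'datasetMappings' -> 'items' path of the source project's mapping; whatever those items contain is passed unchanged to update_dataset_mapping on each target project. A sketch of the assumed payload shape (only the two nested keys are referenced in this diff; item fields are platform-defined and not shown here):

    {
      'datasetMappings' => {
        'items' => [
          # one mapping entry per dataset, forwarded as-is
        ]
      }
    }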
@@ -83,18 +83,29 @@ module GoodData
  segment_info[:from_blueprint] = blueprint
  maql_diff = nil
  previous_master = segment_info[:previous_master]
+ synchronize_ldm_mode = params[:synchronize_ldm].downcase
  diff_against_master = %w(diff_against_master_with_fallback diff_against_master)
- .include?(params[:synchronize_ldm].downcase)
- GoodData.logger.info "Synchronize LDM mode: '#{params[:synchronize_ldm].downcase}'"
- if previous_master && diff_against_master
- maql_diff_params = [:includeGrain]
- maql_diff_params << :excludeFactRule if exclude_fact_rule
- maql_diff_params << :includeDeprecated if include_deprecated
- maql_diff = previous_master.maql_diff(blueprint: blueprint, params: maql_diff_params)
+ .include?(synchronize_ldm_mode)
+ GoodData.logger.info "Synchronize LDM mode: '#{synchronize_ldm_mode}'"
+ if segment_info.key?(:previous_master) && diff_against_master
+ if previous_master
+ maql_diff_params = [:includeGrain]
+ maql_diff_params << :excludeFactRule if exclude_fact_rule
+ maql_diff_params << :includeDeprecated if include_deprecated
+ maql_diff = previous_master.maql_diff(blueprint: blueprint, params: maql_diff_params)
+ else
+ maql_diff = {
+ "projectModelDiff" =>
+ {
+ "updateOperations" => [],
+ "updateScripts" => []
+ }
+ }
+ end
  chunks = maql_diff['projectModelDiff']['updateScripts']
  if chunks.empty?
  GoodData.logger.info "Synchronize LDM to clients will not proceed in mode \
- '#{params[:synchronize_ldm].downcase}' due to no LDM changes in the new master project. \
+ '#{synchronize_ldm_mode}' due to no LDM changes in the segment master project. \
  If you had changed LDM of clients manually, please use mode 'diff_against_clients' \
  to force synchronize LDM to clients"
  end
@@ -124,6 +124,7 @@ module GoodData
  GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size}")

  GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
+ results = []
  case mode
  when 'sync_project', 'sync_one_project_based_on_pid', 'sync_one_project_based_on_custom_id'
  if mode == 'sync_one_project_based_on_pid'
@@ -134,7 +135,9 @@ module GoodData
  user_filters = user_filters.select { |f| f[:pid] == filter } if filter

  GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size}")
- sync_user_filters(project, user_filters, run_params, symbolized_config)
+ current_results = sync_user_filters(project, user_filters, run_params, symbolized_config)
+
+ results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
  when 'sync_multiple_projects_based_on_pid', 'sync_multiple_projects_based_on_custom_id'
  users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
  user_filters.group_by { |u| u[:pid] }.flat_map.pmap do |id, new_filters|
@@ -149,7 +152,9 @@ module GoodData
  end

  GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size}")
- sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+ current_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+
+ results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
  end
  when 'sync_domain_client_workspaces'
  domain_clients = all_clients
@@ -161,7 +166,6 @@ module GoodData
  working_client_ids = []

  users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
- results = []
  user_filters.group_by { |u| u[multiple_projects_column] }.flat_map.pmap do |client_id, new_filters|
  users = users_by_project[client_id]
  fail "Client id cannot be empty" if client_id.blank?
@@ -182,7 +186,7 @@ module GoodData

  GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size}")
  partial_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
- results.concat(partial_results[:results])
+ results.concat(partial_results[:results]) unless partial_results.nil? || partial_results[:results].empty?
  end

  unless run_params[:do_not_touch_filters_that_are_not_mentioned]
@@ -197,17 +201,16 @@ module GoodData
  GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id}")
  current_results = sync_user_filters(current_project, [], run_params.merge(users_brick_input: users), symbolized_config)

- results.concat(current_results[:results])
+ results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
  rescue StandardError => e
  params.gdc_logger.error "Failed to clear filters of #{c.client_id} due to: #{e.inspect}"
  end
  end
  end
-
- {
- results: results
- }
  end
+ {
+ results: results
+ }
  end

  def sync_user_filters(project, filters, params, filters_config)
@@ -0,0 +1,185 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+ # Copyright (c) 2010-2021 GoodData Corporation. All rights reserved.
+ # This source code is licensed under the BSD-style license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ require_relative 'base_action'
+
+ module GoodData
+ module LCM2
+ class UpdateMetricFormats < BaseAction
+ DESCRIPTION = 'Localize Metric Formats'
+
+ PARAMS = define_params(self) do
+ description 'Synchronization Info'
+ param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
+
+ description 'Client Used for Connecting to GD'
+ param :gdc_gd_client, instance_of(Type::GdClientType), required: true
+
+ description 'Organization Name'
+ param :organization, instance_of(Type::StringType), required: false
+
+ description 'DataProduct to manage'
+ param :data_product, instance_of(Type::GDDataProductType), required: false
+
+ description 'Logger'
+ param :gdc_logger, instance_of(Type::GdLogger), required: true
+
+ description 'ADS Client'
+ param :ads_client, instance_of(Type::AdsClientType), required: false
+
+ description 'Input Source'
+ param :input_source, instance_of(Type::HashType), required: false
+
+ description 'Localization query'
+ param :localization_query, instance_of(Type::StringType), required: false
+ end
+
+ RESULT_HEADER = %i[action ok_clients error_clients]
+
+ class << self
+ def load_metric_data(params)
+ if params&.dig(:input_source, :metric_format) && params[:input_source][:metric_format].present?
+ metric_input_source = validate_input_source(params[:input_source])
+ else
+ return nil
+ end
+
+ metric_data_source = GoodData::Helpers::DataSource.new(metric_input_source)
+ begin
+ temp_csv = without_check(PARAMS, params) do
+ File.open(metric_data_source.realize(params), 'r:UTF-8')
+ end
+ rescue StandardError => e
+ GoodData.logger.warn("Unable to get metric input source, skip updating metric formats. Error: #{e.message} - #{e}")
+ return nil
+ end
+
+ metrics_hash = GoodData::Helpers::Csv.read_as_hash temp_csv
+ return nil if metrics_hash.empty?
+
+ expected_keys = %w[tag client_id format]
+ unless expected_keys.map(&:to_sym).all? { |s| metrics_hash.first.key? s }
+ GoodData.logger.warn("The input metric data is incorrect, expecting the following fields: #{expected_keys}")
+ return nil
+ end
+ metrics_hash
+ end
+
+ def validate_input_source(input_source)
+ type = input_source[:type] if input_source&.dig(:type)
+ metric_format = input_source[:metric_format]
+ raise "Incorrect configuration: 'type' of 'input_source' is required" if type.blank?
+
+ modified_input_source = input_source
+ case type
+ when 'ads', 'redshift', 'snowflake', 'bigquery', 'postgresql', 'mssql', 'mysql'
+ if metric_format[:query].blank?
+ GoodData.logger.warn("The metric input_source '#{type}' is missing property 'query'")
+ return nil
+ end
+
+ modified_input_source[:query] = metric_format[:query]
+ return modified_input_source
+ when 's3'
+ if metric_format[:file].blank?
+ GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+ return nil
+ end
+
+ if modified_input_source.key?(:key)
+ modified_input_source[:key] = metric_format[:file]
+ else
+ modified_input_source[:file] = metric_format[:file]
+ end
+ return modified_input_source
+ when 'blobStorage'
+ if metric_format[:file].blank?
+ GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+ return nil
+ end
+
+ modified_input_source[:file] = metric_format[:file]
+ return modified_input_source
+ when 'staging'
+ if metric_format[:file].blank?
+ GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+ return nil
+ end
+
+ modified_input_source[:path] = metric_format[:file]
+ return modified_input_source
+ when 'web'
+ if metric_format[:url].blank?
+ GoodData.logger.warn("The metric input_source '#{type}' is missing property 'url'")
+ return nil
+ end
+
+ modified_input_source[:url] = metric_format[:url]
+ return modified_input_source
+ else
+ return nil
+ end
+ end
+
+ def get_clients_metrics(metric_data)
+ return {} if metric_data.nil?
+
+ metric_groups = {}
+ clients = metric_data.map { |row| row[:client_id] }.uniq
+ clients.each do |client|
+ next if client.blank?
+
+ formats = {}
+ metric_data.select { |row| row[:client_id] == client && row[:tag].present? && row[:format].present? }.each { |row| formats[row[:tag]] = row[:format] }
+ metric_groups[client.to_s] ||= formats
+ end
+ metric_groups
+ end
+
+ def call(params)
+ data = load_metric_data(params)
+ result = []
+ return result if data.nil?
+
+ metric_group = get_clients_metrics(data)
+ return result if metric_group.empty?
+
+ GoodData.logger.debug("Clients have metrics which will be modified: #{metric_group.keys}")
+ updated_clients = params.synchronize.map { |segment| segment.to.map { |client| client[:client_id] } }.flatten.uniq
+ GoodData.logger.debug("Updating clients: #{updated_clients}")
+ data_product = params.data_product
+ data_product_clients = data_product.clients
+ number_client_ok = 0
+ number_client_error = 0
+ metric_group.each do |client_id, formats|
+ next unless updated_clients.include?(client_id)
+
+ client = data_product_clients.find { |c| c.id == client_id }
+ begin
+ GoodData.logger.info("Start updating metric format for client: '#{client_id}'")
+ metrics = client.project.metrics.to_a
+ formats.each do |tag, format|
+ next if tag.blank? || format.blank?
+
+ metrics_to_be_updated = metrics.select { |metric| metric.tags.include?(tag) }
+ metrics_to_be_updated.each do |metric|
+ metric.format = format
+ metric.save
+ end
+ end
+ number_client_ok += 1
+ GoodData.logger.info("Finished updating metric format for client: '#{client_id}'")
+ rescue StandardError => e
+ number_client_error += 1
+ GoodData.logger.warn("Failed to update metric format for client: '#{client_id}'. Error: #{e.message} - #{e}")
+ end
+ end
+ [{ :action => 'Update metric format', :ok_clients => number_client_ok, :error_clients => number_client_error }]
+ end
+ end
+ end
+ end
+ end
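
Note: the metric_format input is a CSV resolved through the regular DataSource machinery, and load_metric_data requires the columns tag, client_id and format. A hypothetical input_source configuration and input file; only 'type', 'metric_format' and the column names are taken from the code above, the values (bucket, file name, formats) are illustrative:

    'input_source' => {
      'type' => 's3',
      'bucket' => 'my-bucket',
      'metric_format' => { 'file' => 'metric_formats.csv' }
    }

    tag,client_id,format
    revenue,client_a,"$#,##0.00"
    revenue,client_b,"#,##0.00 EUR"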
@@ -0,0 +1,5 @@
+ DELETE FROM "<%= table_name || 'LCM_RELEASE' %>"
+ WHERE
+ segment_id = '<%= segment_id %>'
+ AND master_project_id IN (<%= master_project_ids %>)
+ ;
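
Note: this template is rendered by delete_master_project_from_ads (see release_table_helper.rb below), which passes master_project_ids already quoted and comma-joined. Rendered output for illustrative values would look like:

    DELETE FROM "LCM_RELEASE"
    WHERE
      segment_id = 'basic_segment'
      AND master_project_id IN ('pid_old_1', 'pid_old_2')
    ;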
@@ -12,6 +12,21 @@ module GoodData

  class << self
  def latest_master_project_from_ads(release_table_name, ads_client, segment_id)
+ sorted = get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
+ sorted.last
+ end
+
+ def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+ file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+ sorted = get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
+ latest_master_project = sorted.last
+
+ version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
+ GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
+ latest_master_project
+ end
+
+ def get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
  replacements = {
  table_name: release_table_name || DEFAULT_TABLE_NAME,
  segment_id: segment_id
@@ -22,18 +37,27 @@ module GoodData

  res = ads_client.execute_select(query)
  sorted = res.sort_by { |row| row[:version] }
- sorted.last
+ sorted
  end

- def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+ def delete_master_project_from_ads(release_table_name, ads_client, segment_id, removal_master_project_ids)
+ replacements = {
+ table_name: release_table_name || DEFAULT_TABLE_NAME,
+ segment_id: segment_id,
+ master_project_ids: removal_master_project_ids.map { |x| "'#{x}'" } * ', '
+ }
+
+ path = File.expand_path('../data/delete_from_lcm_release.sql.erb', __dir__)
+ query = GoodData::Helpers::ErbHelper.template_file(path, replacements)
+
+ ads_client.execute(query)
+ end
+
+ def get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
  file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
  data = GoodData::Helpers::Csv.read_as_hash(file_path)
- latest_master_project = data.sort_by { |master| master[:version] }
- .reverse.first
-
- version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
- GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
- latest_master_project
+ sorted = data.sort_by { |master| master[:version] }
+ sorted
  end

  def update_latest_master_to_nfs(domain_id, data_product_id, segment_id, master_pid, version)
@@ -46,6 +70,16 @@ module GoodData
  )
  end

+ def update_master_project_to_nfs(domain_id, data_product_id, segment_id, data)
+ file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+ FileUtils.mkpath(file_path.split('/')[0...-1].join('/'))
+ CSV.open(file_path, 'w', write_headers: true, headers: data.first.keys) do |csv|
+ data.each do |r|
+ csv << r.values
+ end
+ end
+ end
+
  def path_to_release_table_file(domain_id, data_prod_id, segment_id)
  nsf_directory = ENV['RELEASE_TABLE_NFS_DIRECTORY'] || DEFAULT_NFS_DIRECTORY
  [nsf_directory, domain_id, data_prod_id + '-' + segment_id + '.csv'].join('/')
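
Note: update_master_project_to_nfs rewrites the whole per-segment release file, taking the CSV headers from the keys of the first row. An illustrative file using only the columns referenced elsewhere in this diff (master_project_id, version); the real release table may carry additional columns:

    master_project_id,version
    pid_old_master,1
    pid_current_master,2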
@@ -96,6 +96,7 @@ module GoodData
  CollectComputedAttributeMetrics,
  ImportObjectCollections,
  SynchronizeComputedAttributes,
+ SynchronizeDataSetMapping,
  SynchronizeProcesses,
  SynchronizeSchedules,
  SynchronizeColorPalette,
@@ -122,9 +123,11 @@ module GoodData
  AssociateClients,
  RenameExistingClientProjects,
  ProvisionClients,
+ UpdateMetricFormats,
  EnsureTechnicalUsersDomain,
  EnsureTechnicalUsersProject,
  CollectDymanicScheduleParams,
+ SynchronizeDataSetMapping,
  SynchronizeETLsInSegment
  ],

@@ -136,8 +139,10 @@ module GoodData
  EnsureTechnicalUsersDomain,
  EnsureTechnicalUsersProject,
  SynchronizeLdm,
+ SynchronizeDataSetMapping,
  MigrateGdcDateDimension,
  SynchronizeClients,
+ UpdateMetricFormats,
  SynchronizeComputedAttributes,
  CollectDymanicScheduleParams,
  SynchronizeETLsInSegment
@@ -54,6 +54,7 @@ module GoodData
  y << (klass ? client.create(klass, item, project: project) : item)
  end
  break if result['objects']['paging']['count'] < page_limit
+
  offset += page_limit
  end
  end
@@ -34,7 +34,7 @@ module GoodData
  c.create(DataSource, ds_data)
  end
  else
- c.create(DataSource, c.get("#{DATA_SOURCES_URL}/#{id}"))
+ c.create(DataSource, c.get(DATA_SOURCES_URL + '/' + id))
  end
  end

@@ -177,6 +177,10 @@ module GoodData
  @json['dataSource']['connectionInfo'][type]
  end

+ def type
+ @json['dataSource']['connectionInfo'].first[0].upcase
+ end
+
  private

  def build_connection_info
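
Note: the new DataSource#type helper derives the type from the first key of connectionInfo; for example, a data source whose connectionInfo is { 'snowflake' => { ... } } would report type 'SNOWFLAKE' (the key name is illustrative here).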