gooddata 2.1.8-java → 2.1.13-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +7 -0
  3. data/.travis.yml +2 -4
  4. data/CHANGELOG.md +43 -0
  5. data/Dockerfile +19 -4
  6. data/Dockerfile.jruby +4 -4
  7. data/Dockerfile.ruby +5 -4
  8. data/README.md +2 -0
  9. data/SDK_VERSION +1 -1
  10. data/VERSION +1 -1
  11. data/bin/provision.sh +2 -0
  12. data/bin/release.sh +2 -0
  13. data/bin/rollout.sh +2 -0
  14. data/bin/run_brick.rb +31 -7
  15. data/bin/test_projects_cleanup.rb +10 -2
  16. data/bin/user_filters.sh +2 -0
  17. data/ci.rake +1 -1
  18. data/ci/bigquery/pom.xml +54 -0
  19. data/ci/redshift/pom.xml +73 -0
  20. data/ci/snowflake/pom.xml +57 -0
  21. data/dev-gooddata-sso.pub.encrypted +40 -40
  22. data/gdc_fossa_lcm.yaml +2 -0
  23. data/gdc_fossa_ruby_sdk.yaml +4 -0
  24. data/gooddata.gemspec +6 -2
  25. data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
  26. data/k8s/charts/lcm-bricks/templates/prometheus/alertingRules.yaml +22 -12
  27. data/lcm.rake +14 -0
  28. data/lib/gooddata/bricks/middleware/execution_result_middleware.rb +68 -0
  29. data/lib/gooddata/bricks/middleware/logger_middleware.rb +2 -1
  30. data/lib/gooddata/bricks/middleware/mask_logger_decorator.rb +5 -1
  31. data/lib/gooddata/bricks/pipeline.rb +7 -0
  32. data/lib/gooddata/cloud_resources/bigquery/bigquery_client.rb +86 -0
  33. data/lib/gooddata/cloud_resources/bigquery/drivers/.gitkeepme +0 -0
  34. data/lib/gooddata/cloud_resources/cloud_resouce_factory.rb +28 -0
  35. data/lib/gooddata/cloud_resources/cloud_resource_client.rb +24 -0
  36. data/lib/gooddata/cloud_resources/cloud_resources.rb +12 -0
  37. data/lib/gooddata/cloud_resources/redshift/drivers/log4j.properties +15 -0
  38. data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +101 -0
  39. data/lib/gooddata/cloud_resources/snowflake/drivers/.gitkeepme +0 -0
  40. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +84 -0
  41. data/lib/gooddata/exceptions/invalid_env_error.rb +15 -0
  42. data/lib/gooddata/helpers/data_helper.rb +10 -0
  43. data/lib/gooddata/helpers/data_source_helpers.rb +47 -0
  44. data/lib/gooddata/helpers/global_helpers.rb +4 -0
  45. data/lib/gooddata/helpers/global_helpers_params.rb +6 -9
  46. data/lib/gooddata/lcm/actions/collect_clients.rb +6 -6
  47. data/lib/gooddata/lcm/actions/collect_dynamic_schedule_params.rb +6 -6
  48. data/lib/gooddata/lcm/actions/collect_segment_clients.rb +4 -1
  49. data/lib/gooddata/lcm/actions/collect_segments.rb +1 -2
  50. data/lib/gooddata/lcm/actions/collect_users_brick_users.rb +7 -6
  51. data/lib/gooddata/lcm/actions/create_segment_masters.rb +5 -3
  52. data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +116 -0
  53. data/lib/gooddata/lcm/actions/set_master_project.rb +76 -0
  54. data/lib/gooddata/lcm/actions/synchronize_clients.rb +1 -1
  55. data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb +1 -2
  56. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +20 -3
  57. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +23 -3
  58. data/lib/gooddata/lcm/actions/synchronize_users.rb +50 -30
  59. data/lib/gooddata/lcm/actions/update_release_table.rb +7 -1
  60. data/lib/gooddata/lcm/exceptions/lcm_execution_error.rb +16 -0
  61. data/lib/gooddata/lcm/helpers/release_table_helper.rb +16 -8
  62. data/lib/gooddata/lcm/lcm2.rb +28 -5
  63. data/lib/gooddata/models/domain.rb +17 -15
  64. data/lib/gooddata/models/execution.rb +0 -1
  65. data/lib/gooddata/models/execution_detail.rb +0 -1
  66. data/lib/gooddata/models/from_wire.rb +1 -0
  67. data/lib/gooddata/models/process.rb +11 -3
  68. data/lib/gooddata/models/profile.rb +33 -11
  69. data/lib/gooddata/models/project.rb +120 -31
  70. data/lib/gooddata/models/project_creator.rb +2 -0
  71. data/lib/gooddata/models/schedule.rb +0 -1
  72. data/lib/gooddata/rest/client.rb +2 -2
  73. data/lib/gooddata/rest/connection.rb +5 -3
  74. data/rubydev_public.gpg.encrypted +51 -51
  75. data/rubydev_secret_keys.gpg.encrypted +109 -109
  76. metadata +32 -13
  77. data/lib/gooddata/extensions/hash.rb +0 -18
@@ -38,11 +38,11 @@ module GoodData
  def call(params)
  return [] unless params.dynamic_params

- schedule_title_column = params.schedule_title_column || 'schedule_title'
- client_id_column = params.client_id_column || 'client_id'
- param_name_column = params.param_name_column || 'param_name'
- param_value_column = params.param_value_column || 'param_value'
- param_secure_column = params.param_secure_column || 'param_secure'
+ schedule_title_column = params.schedule_title_column&.downcase || 'schedule_title'
+ client_id_column = params.client_id_column&.downcase || 'client_id'
+ param_name_column = params.param_name_column&.downcase || 'param_name'
+ param_value_column = params.param_value_column&.downcase || 'param_value'
+ param_secure_column = params.param_secure_column&.downcase || 'param_secure'

  encryption_key = params.dynamic_params_encryption_key || ''
  exist_encryption_key = encryption_key.blank? ? false : true
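
The `&.downcase` change above is what makes the column-name parameters case-insensitive: with Ruby's safe-navigation operator, an unset parameter still falls through to the default name, while a supplied value is lower-cased to match the downcased CSV headers. A minimal sketch of that behaviour (standalone Ruby, values are illustrative only):

    nil&.downcase || 'schedule_title'               # => "schedule_title" (default kept when the param is nil)
    'Schedule_Title'&.downcase || 'schedule_title'  # => "schedule_title" (supplied value normalized to lower case)
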
@@ -59,7 +59,7 @@ module GoodData
  schedule_hidden_params = {}
  exist_param_secure = false

- CSV.foreach(input_data, :headers => true, :return_headers => false, encoding: 'utf-8') do |row|
+ CSV.foreach(input_data, :headers => true, :return_headers => false, :header_converters => :downcase, :encoding => 'utf-8') do |row|
  is_param_secure = row[param_secure_column] == 'true'
  is_decrypt_secure_value = is_param_secure && exist_encryption_key ? true : false
  exist_param_secure = true if is_param_secure
@@ -65,8 +65,11 @@ module GoodData
  segment.segment_id
  )
  else
- latest_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment.segment_id)
+ data_product = params.data_product
+ data_product_id = data_product.data_product_id
+ latest_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product_id, segment.segment_id)
  end
+ raise 'Release table has no data' unless latest_master

  latest_master = client.projects(latest_master[:master_project_id])

@@ -51,8 +51,7 @@ module GoodData
51
51
  begin
52
52
  project = segment.master_project
53
53
  rescue RestClient::BadRequest => e
54
- params.gdc_logger.error "Failed to retrieve master project for segment #{segment.id}. Error: #{e}"
55
- raise
54
+ raise "Failed to retrieve master project for segment #{segment.id}. Error: #{e}"
56
55
  end
57
56
 
58
57
  raise "Master project for segment #{segment.id} doesn't exist." unless project
@@ -35,7 +35,7 @@ module GoodData
  class << self
  def call(params)
  users_brick_users = []
- login_column = params.users_brick_config.login_column || 'login'
+ login_column = params.users_brick_config.login_column&.downcase || 'login'
  users_brick_data_source = GoodData::Helpers::DataSource.new(params.users_brick_config.input_source)

  users_brick_data_source_file = without_check(PARAMS, params) do
@@ -45,14 +45,15 @@ module GoodData
  )
  end
  CSV.foreach(users_brick_data_source_file,
- headers: true,
- return_headers: false,
- encoding: 'utf-8') do |row|
- pid = row[params.multiple_projects_column]
+ :headers => true,
+ :return_headers => false,
+ :header_converters => :downcase,
+ :encoding => 'utf-8') do |row|
+ pid = row[params.multiple_projects_column&.downcase]
  fail "The set multiple_projects_column '#{params.multiple_projects_column}' of the users input is empty" if !pid && MULTIPLE_COLUMN_MODES.include?(params.sync_mode)

  users_brick_users << {
- login: row[login_column].downcase,
+ login: row[login_column].nil? ? nil : row[login_column].strip.downcase,
  pid: pid
  }
  end
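
The CSV reads above and in the previous hunk now also pass `:header_converters => :downcase`, a converter built into Ruby's CSV library that lower-cases every header before rows are materialized, so lookups such as `row[login_column]` succeed regardless of how the input file capitalizes its headers. A self-contained sketch using assumed, illustrative header names:

    require 'csv'

    data = "Login,PID\nAlice@Example.com,p1\n"
    CSV.parse(data, headers: true, header_converters: :downcase).each do |row|
      puts row['login'] # => "Alice@Example.com" -- header matched even though the file says 'Login'
      puts row['pid']   # => "p1"
    end
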
@@ -71,7 +71,7 @@ module GoodData
  ads_output_stage_prefix = segment_in.ads_output_stage_prefix

  # Create master project Postgres
- version = get_project_version(params, domain_name, segment_id) + 1
+ version = get_project_version(params, domain_name, data_product, segment_id) + 1

  master_name = segment_in.master_name.gsub('#{version}', version.to_s)

@@ -113,6 +113,7 @@ module GoodData
  status = 'modified'
  end

+ segment_in[:data_product_id] = data_product.data_product_id
  segment_in[:master_pid] = project.pid
  segment_in[:version] = version
  segment_in[:timestamp] = Time.now.utc.iso8601
@@ -150,7 +151,7 @@ module GoodData
  }
  end

- def get_project_version(params, domain_name, segment_id)
+ def get_project_version(params, domain_name, data_product, segment_id)
  if params.ads_client
  current_master = GoodData::LCM2::Helpers.latest_master_project_from_ads(
  params.release_table_name,
@@ -158,7 +159,8 @@ module GoodData
  segment_id
  )
  else
- current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment_id)
+ data_product_id = data_product.data_product_id # data_product was populated by CollectDataProduct action already
+ current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product_id, segment_id)
  end
  return 0 unless current_master
  current_master[:version].to_i
@@ -0,0 +1,116 @@
+ # frozen_string_literal: true
+ # (C) 2019-2020 GoodData Corporation
+ require_relative 'base_action'
+
+ # Migrate date dimension urn:gooddata:date or urn:custom:date to urn:custom_v2:date
+ module GoodData
+ module LCM2
+ class MigrateGdcDateDimension < BaseAction
+ DESCRIPTION = 'Migrate Gdc Date Dimension'
+ DATE_DIMENSION_CUSTOM_V2 = 'urn:custom_v2:date'
+ DATE_DIMENSION_OLD = %w[urn:gooddata:date urn:custom:date]
+
+ PARAMS = define_params(self) do
+ description 'Client Used for Connecting to GD'
+ param :gdc_gd_client, instance_of(Type::GdClientType), required: true
+
+ description 'Specifies how to synchronize LDM and resolve possible conflicts'
+ param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'
+
+ description 'Synchronization Info'
+ param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
+ end
+
+ RESULT_HEADER = %i[from to status]
+
+ class << self
+ def call(params)
+ results = []
+ params.synchronize.map do |segment_info|
+ result = migrate_date_dimension(params, segment_info)
+ results.concat(result)
+ end
+
+ {
+ results: results
+ }
+ end
+
+ def migrate_date_dimension(params, segment_info)
+ results = []
+ client = params.gdc_gd_client
+ latest_blueprint = segment_info[:from_blueprint]
+ # don't migrate when latest master doesn't contain custom v2 date.
+ return results unless contain_v2?(latest_blueprint)
+
+ previous_blueprint = segment_info[:previous_master]&.blueprint
+ # check latest master and previous master
+ master_upgrade_datasets = get_upgrade_dates(latest_blueprint, previous_blueprint) if params[:synchronize_ldm].downcase == 'diff_against_master' && previous_blueprint
+ unless master_upgrade_datasets&.empty?
+ segment_info[:to].pmap do |entry|
+ pid = entry[:pid]
+ to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
+ to_blueprint = to_project.blueprint
+ upgrade_datasets = get_upgrade_dates(latest_blueprint, to_blueprint)
+ next if upgrade_datasets.empty?
+
+ message = get_upgrade_message(upgrade_datasets)
+
+ results << {
+ from: segment_info[:from],
+ to: pid,
+ status: to_project.upgrade_custom_v2(message)
+ }
+ end
+ end
+
+ results
+ end
+
+ def get_upgrade_dates(src_blueprint, dest_blueprint)
+ dest_dates = get_date_dimensions(dest_blueprint) if dest_blueprint
+ src_dates = get_date_dimensions(src_blueprint) if src_blueprint
+
+ return false if dest_dates.empty? || src_dates.empty?
+
+ upgrade_datasets = []
+ dest_dates.each do |dest|
+ src_dim = get_date_dimension(src_blueprint, dest[:id])
+ next unless src_dim
+
+ upgrade_datasets << src_dim[:identifier] if upgrade?(src_dim, dest) && src_dim[:identifier]
+ end
+
+ upgrade_datasets
+ end
+
+ def get_upgrade_message(upgrade_datasets)
+ {
+ upgrade: {
+ dateDatasets: {
+ upgrade: "exact",
+ datasets: upgrade_datasets
+ }
+ }
+ }
+ end
+
+ def upgrade?(src_dim, dest_dim)
+ src_dim[:urn] == DATE_DIMENSION_CUSTOM_V2 && DATE_DIMENSION_OLD.any? { |e| dest_dim[:urn] == e }
+ end
+
+ def contain_v2?(blueprint)
+ get_date_dimensions(blueprint).any? { |e| e[:urn] == DATE_DIMENSION_CUSTOM_V2 }
+ end
+
+ def get_date_dimension(blueprint, id)
+ GoodData::Model::ProjectBlueprint.find_date_dimension(blueprint, id)
+ end
+
+ def get_date_dimensions(blueprint)
+ GoodData::Model::ProjectBlueprint.date_dimensions(blueprint)
+ end
+ end
+ end
+ end
+ end
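
The `get_upgrade_message` helper in the new file above determines the request body handed to `upgrade_custom_v2` on each client project. For an assumed, illustrative dataset identifier, the generated structure looks like this:

    {
      upgrade: {
        dateDatasets: {
          upgrade: "exact",
          datasets: ["dataset.dt_closedate"] # hypothetical identifier collected by get_upgrade_dates
        }
      }
    }
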
@@ -0,0 +1,76 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+ #
+ # Copyright (c) 2010-2017 GoodData Corporation. All rights reserved.
+ # This source code is licensed under the BSD-style license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ require_relative 'base_action'
+
+ module GoodData
+ module LCM2
+ class SetMasterProject < BaseAction
+ DESCRIPTION = 'Set master project'
+
+ PARAMS = define_params(self) do
+ description 'Organization Name'
+ param :organization, instance_of(Type::StringType), required: false
+
+ description 'Domain'
+ param :domain, instance_of(Type::StringType), required: false
+
+ description 'ADS Client'
+ param :ads_client, instance_of(Type::AdsClientType), required: false
+
+ description 'Table Name'
+ param :release_table_name, instance_of(Type::StringType), required: false
+
+ description 'Segments to manage'
+ param :segments, array_of(instance_of(Type::SegmentType)), required: true
+
+ description 'DataProduct to manage'
+ param :data_product, instance_of(Type::GDDataProductType), required: false
+
+ description 'Released master project should be used in next rollout'
+ param :set_master_project, instance_of(Type::StringType), required: false
+ end
+
+ class << self
+ def call(params)
+ results = []
+ domain_name = params.organization || params.domain
+ data_product = params.data_product
+ params.segments.each do |segment_in|
+ version = get_latest_version(params, domain_name, data_product.data_product_id, segment_in.segment_id) + 1
+ segment_in[:data_product_id] = data_product.data_product_id
+ segment_in[:master_pid] = params.set_master_project
+ segment_in[:version] = version
+ segment_in[:timestamp] = Time.now.utc.iso8601
+
+ results << {
+ data_product_id: data_product.data_product_id,
+ segment_id: segment_in.segment_id,
+ version: version
+ }
+ end
+ results
+ end
+
+ def get_latest_version(params, domain_name, data_product_id, segment_id)
+ if params.ads_client
+ current_master = GoodData::LCM2::Helpers.latest_master_project_from_ads(
+ params.release_table_name,
+ params.ads_client,
+ segment_id
+ )
+ else
+ current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product_id, segment_id)
+ end
+ return 0 unless current_master
+
+ current_master[:version].to_i
+ end
+ end
+ end
+ end
+ end
@@ -68,7 +68,7 @@ module GoodData
  segment.segment_id
  )
  else
- current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment.segment_id)
+ current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product.data_product_id, segment.segment_id)
  end

  # TODO: Check res.first.nil? || res.first[:master_project_id].nil?
@@ -80,8 +80,7 @@ module GoodData
  res = GoodData::Helpers.symbolize_keys(res)

  if res[:syncedResult][:errors]
- params.gdc_logger.error "Error: #{res[:syncedResult][:errors].pretty_inspect}"
- fail "Failed to sync processes/schedules for segment #{segment_id}"
+ fail "Failed to sync processes/schedules for segment #{segment_id}. Error: #{res[:syncedResult][:errors].pretty_inspect}"
  end

  if res[:syncedResult][:clients]
@@ -44,8 +44,13 @@ module GoodData

  description 'Specifies how to synchronize LDM and resolve possible conflicts'
  param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'
+
+ description 'Enables handling of deprecated objects in the logical data model.'
+ param :include_deprecated, instance_of(Type::BooleanType), required: false, default: false
  end

+ RESULT_HEADER = %i[from to status]
+
  class << self
  def call(params)
  results = []
@@ -70,19 +75,29 @@ module GoodData
  results = []
  client = params.gdc_gd_client
  exclude_fact_rule = params.exclude_fact_rule.to_b
+ include_deprecated = params.include_deprecated.to_b
  from_pid = segment_info[:from]
  from = params.development_client.projects(from_pid) || fail("Invalid 'from' project specified - '#{from_pid}'")
-
  GoodData.logger.info "Creating Blueprint, project: '#{from.title}', PID: #{from_pid}"
  blueprint = from.blueprint(include_ca: params.include_computed_attributes.to_b)
+ segment_info[:from_blueprint] = blueprint
  maql_diff = nil
  previous_master = segment_info[:previous_master]
  diff_against_master = %w(diff_against_master_with_fallback diff_against_master)
  .include?(params[:synchronize_ldm].downcase)
+ GoodData.logger.info "Synchronize LDM mode: '#{params[:synchronize_ldm].downcase}'"
  if previous_master && diff_against_master
  maql_diff_params = [:includeGrain]
  maql_diff_params << :excludeFactRule if exclude_fact_rule
+ maql_diff_params << :includeDeprecated if include_deprecated
  maql_diff = previous_master.maql_diff(blueprint: blueprint, params: maql_diff_params)
+ chunks = maql_diff['projectModelDiff']['updateScripts']
+ if chunks.empty?
+ GoodData.logger.info "Synchronize LDM to clients will not proceed in mode \
+ '#{params[:synchronize_ldm].downcase}' due to no LDM changes in the new master project. \
+ If you had changed LDM of clients manually, please use mode 'diff_against_clients' \
+ to force synchronize LDM to clients"
+ end
  end

  segment_info[:to] = segment_info[:to].pmap do |entry|
@@ -96,7 +111,8 @@ module GoodData
  update_preference: params[:update_preference],
  exclude_fact_rule: exclude_fact_rule,
  execute_ca_scripts: false,
- maql_diff: maql_diff
+ maql_diff: maql_diff,
+ include_deprecated: include_deprecated
  )
  rescue MaqlExecutionError => e
  GoodData.logger.info("Applying MAQL to project #{to_project.title} - #{pid} failed. Reason: #{e}")
@@ -106,7 +122,8 @@ module GoodData
  blueprint,
  update_preference: params[:update_preference],
  exclude_fact_rule: exclude_fact_rule,
- execute_ca_scripts: false
+ execute_ca_scripts: false,
+ include_deprecated: include_deprecated
  )
  end

@@ -121,6 +121,7 @@ module GoodData
  users_brick_input: params.users_brick_users
  }
  all_clients = domain.clients(:all, data_product).to_a
+ GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size}")

  GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
  case mode
@@ -131,6 +132,8 @@ module GoodData
  filter = UserBricksHelper.resolve_client_id(domain, project, params.data_product)
  end
  user_filters = user_filters.select { |f| f[:pid] == filter } if filter
+
+ GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size}")
  sync_user_filters(project, user_filters, run_params, symbolized_config)
  when 'sync_multiple_projects_based_on_pid', 'sync_multiple_projects_based_on_custom_id'
  users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
@@ -144,6 +147,8 @@ module GoodData
  elsif mode == 'sync_multiple_projects_based_on_pid'
  current_project = client.projects(id)
  end
+
+ GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size}")
  sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
  end
  when 'sync_domain_client_workspaces'
@@ -170,6 +175,8 @@ module GoodData
  fail "Client #{client_id} does not have project." unless current_project

  working_client_ids << client_id
+
+ GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size}")
  partial_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
  results.concat(partial_results[:results])
  end
@@ -182,6 +189,8 @@ module GoodData
  current_project = c.project
  users = users_by_project[c.client_id]
  params.gdc_logger.info "Delete all filters in project #{current_project.pid} of client #{c.client_id}"
+
+ GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id}")
  current_results = sync_user_filters(current_project, [], run_params.merge(users_brick_input: users), symbolized_config)

  results.concat(current_results[:results])
@@ -214,10 +223,21 @@ module GoodData
  multiple_projects_column = params.multiple_projects_column
  data_source = GoodData::Helpers::DataSource.new(params.input_source)

- without_check(PARAMS, params) do
- CSV.foreach(File.open(data_source.realize(params), 'r:UTF-8'), headers: csv_with_headers, return_headers: false, encoding: 'utf-8') do |row|
- filters << row.to_hash.merge(pid: row[multiple_projects_column])
+ tmp = without_check(PARAMS, params) do
+ File.open(data_source.realize(params), 'r:UTF-8')
+ end
+
+ begin
+ GoodData.logger.info('Start reading data')
+ row_count = 0
+ CSV.foreach(tmp, :headers => csv_with_headers, :return_headers => false, :header_converters => :downcase, :encoding => 'utf-8') do |row|
+ filters << row.to_hash.merge(pid: row[multiple_projects_column.downcase])
+ row_count += 1
+ GoodData.logger.info("Read #{row_count} rows") if (row_count % 50_000).zero?
  end
+ GoodData.logger.info("Done reading data, total #{row_count} rows")
+ rescue Exception => e # rubocop:disable RescueException
+ fail "There was an error during loading data. Message: #{e.message}. Error: #{e}"
  end

  if filters.empty? && %w(sync_multiple_projects_based_on_pid sync_multiple_projects_based_on_custom_id).include?(params.sync_mode)