gooddata 2.1.8-java → 2.1.9-java

Files changed (55)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +6 -0
  3. data/.travis.yml +1 -1
  4. data/Dockerfile +9 -4
  5. data/Dockerfile.jruby +4 -4
  6. data/Dockerfile.ruby +5 -4
  7. data/SDK_VERSION +1 -1
  8. data/VERSION +1 -1
  9. data/bin/provision.sh +2 -0
  10. data/bin/release.sh +2 -0
  11. data/bin/rollout.sh +2 -0
  12. data/bin/run_brick.rb +28 -7
  13. data/bin/test_projects_cleanup.rb +4 -0
  14. data/bin/user_filters.sh +2 -0
  15. data/ci.rake +1 -1
  16. data/dev-gooddata-sso.pub.encrypted +40 -40
  17. data/gooddata.gemspec +5 -1
  18. data/lcm.rake +10 -0
  19. data/lib/gooddata/bricks/middleware/execution_result_middleware.rb +68 -0
  20. data/lib/gooddata/bricks/middleware/logger_middleware.rb +2 -1
  21. data/lib/gooddata/bricks/middleware/mask_logger_decorator.rb +5 -1
  22. data/lib/gooddata/bricks/pipeline.rb +7 -0
  23. data/lib/gooddata/cloud_resources/cloud_resouce_factory.rb +28 -0
  24. data/lib/gooddata/cloud_resources/cloud_resource_client.rb +24 -0
  25. data/lib/gooddata/cloud_resources/cloud_resources.rb +12 -0
  26. data/lib/gooddata/cloud_resources/redshift/drivers/log4j.properties +15 -0
  27. data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +100 -0
  28. data/lib/gooddata/exceptions/invalid_env_error.rb +15 -0
  29. data/lib/gooddata/helpers/data_helper.rb +10 -0
  30. data/lib/gooddata/helpers/global_helpers.rb +4 -0
  31. data/lib/gooddata/helpers/global_helpers_params.rb +4 -7
  32. data/lib/gooddata/lcm/actions/collect_segment_clients.rb +4 -1
  33. data/lib/gooddata/lcm/actions/collect_segments.rb +1 -2
  34. data/lib/gooddata/lcm/actions/create_segment_masters.rb +5 -3
  35. data/lib/gooddata/lcm/actions/synchronize_clients.rb +1 -1
  36. data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb +1 -2
  37. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +10 -2
  38. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +22 -2
  39. data/lib/gooddata/lcm/actions/synchronize_users.rb +19 -0
  40. data/lib/gooddata/lcm/actions/update_release_table.rb +7 -1
  41. data/lib/gooddata/lcm/exceptions/lcm_execution_error.rb +16 -0
  42. data/lib/gooddata/lcm/helpers/release_table_helper.rb +16 -8
  43. data/lib/gooddata/lcm/lcm2.rb +6 -4
  44. data/lib/gooddata/models/execution.rb +0 -1
  45. data/lib/gooddata/models/execution_detail.rb +0 -1
  46. data/lib/gooddata/models/profile.rb +33 -11
  47. data/lib/gooddata/models/project.rb +2 -2
  48. data/lib/gooddata/models/project_creator.rb +2 -0
  49. data/lib/gooddata/models/schedule.rb +0 -1
  50. data/lib/gooddata/rest/client.rb +2 -2
  51. data/lib/gooddata/rest/connection.rb +5 -3
  52. data/rubydev_public.gpg.encrypted +51 -51
  53. data/rubydev_secret_keys.gpg.encrypted +109 -109
  54. metadata +12 -5
  55. data/lib/gooddata/extensions/hash.rb +0 -18

data/lib/gooddata/cloud_resources/cloud_resouce_factory.rb (new file)
@@ -0,0 +1,28 @@
+# encoding: UTF-8
+#
+# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+require 'active_support/core_ext/string/inflections'
+require_relative 'cloud_resource_client'
+
+module GoodData
+  module CloudResources
+    class CloudResourceFactory
+      class << self
+        def create(type, data = {}, opts = {})
+          clients = CloudResourceClient.descendants.select { |c| c.respond_to?("accept?") && c.send("accept?", type) }
+          raise "DataSource does not support type \"#{type}\"" if clients.empty?
+
+          res = clients[0].new(data)
+          opts.each do |key, value|
+            method = "#{key}="
+            res.send(method, value) if res.respond_to?(method)
+          end
+          res
+        end
+      end
+    end
+  end
+end

data/lib/gooddata/cloud_resources/cloud_resource_client.rb (new file)
@@ -0,0 +1,24 @@
+# encoding: UTF-8
+#
+# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+module GoodData
+  module CloudResources
+    class CloudResourceClient
+      def self.inherited(klass)
+        @descendants ||= []
+        @descendants << klass
+      end
+
+      def self.descendants
+        @descendants || []
+      end
+
+      def realize_query(_query, _params)
+        raise NotImplementedError, 'Must be implemented in subclass'
+      end
+    end
+  end
+end
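
The two files above form a small plugin registry: CloudResourceClient.inherited records every subclass, and CloudResourceFactory.create instantiates the first registered subclass whose accept? returns true for the requested type. A minimal sketch of how a client plugs in (the ExampleClient class and its return value are invented for illustration; only the Redshift client below actually ships in this release):

# Illustrative only -- assumes cloud_resource_client.rb and the factory above are loaded.
module GoodData
  module CloudResources
    class ExampleClient < CloudResourceClient      # hypothetical subclass, not in the gem
      def self.accept?(type)
        type == 'example'                          # claim the 'example' type
      end

      def initialize(_options = {}); end

      def realize_query(_query, _params)
        'example.csv'                              # a real client writes the query result to CSV and returns the path
      end
    end
  end
end

# The factory scans CloudResourceClient.descendants and returns an instance of the match.
client = GoodData::CloudResources::CloudResourceFactory.create('example')
client.realize_query('SELECT 1', {})               # => "example.csv"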

data/lib/gooddata/cloud_resources/cloud_resources.rb (new file)
@@ -0,0 +1,12 @@
+# encoding: UTF-8
+#
+# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+require 'pathname'
+
+base = Pathname(__FILE__).dirname.expand_path
+Dir.glob(base + '**/*.rb').each do |file|
+  require file
+end

data/lib/gooddata/cloud_resources/redshift/drivers/log4j.properties (new file)
@@ -0,0 +1,15 @@
+#
+# Copyright (C) 2007-2019, GoodData(R) Corporation. All rights reserved.
+#
+
+#=======================================================================================================================
+# Root Logger
+#=======================================================================================================================
+#log4j.rootCategory=INFO, Syslog, Console
+log4j.rootCategory=INFO
+
+#=======================================================================================================================
+# Logger with Higher Verbosity
+#=======================================================================================================================
+log4j.logger.com.amazonaws=INFO
+

data/lib/gooddata/cloud_resources/redshift/redshift_client.rb (new file)
@@ -0,0 +1,100 @@
+# encoding: UTF-8
+#
+# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+require 'securerandom'
+require 'java'
+require 'pathname'
+require_relative '../cloud_resource_client'
+
+base = Pathname(__FILE__).dirname.expand_path
+Dir.glob(base + 'drivers/*.jar').each do |file|
+  require file unless file.start_with?('lcm-redshift-driver')
+end
+
+module GoodData
+  module CloudResources
+    class RedshiftClient < CloudResourceClient
+      class << self
+        def accept?(type)
+          type == 'redshift'
+        end
+      end
+
+      def initialize(options = {})
+        raise("Data Source needs a client to Redshift to be able to query the storage but 'redshift_client' is empty.") unless options['redshift_client']
+
+        if options['redshift_client']['connection'].is_a?(Hash)
+          @database = options['redshift_client']['connection']['database']
+          @schema = options['redshift_client']['connection']['schema'] || 'public'
+          @url = options['redshift_client']['connection']['url']
+          @authentication = options['redshift_client']['connection']['authentication']
+        else
+          raise('Missing connection info for Redshift client')
+
+        end
+        @debug = options['debug'] == true || options['debug'] == 'true'
+
+        Java.com.amazon.redshift.jdbc42.Driver
+        base = Pathname(__FILE__).dirname
+        org.apache.log4j.PropertyConfigurator.configure("#{base}/drivers/log4j.properties")
+      end
+
+      def realize_query(query, _params)
+        GoodData.gd_logger.info("Realize SQL query: type=redshift status=started")
+
+        connect
+        filename = "#{SecureRandom.urlsafe_base64(6)}_#{Time.now.to_i}.csv"
+        measure = Benchmark.measure do
+          statement = @connection.create_statement
+          schema_sql = "set search_path to #{@schema}"
+          statement.execute(schema_sql)
+
+          has_result = statement.execute(query)
+          if has_result
+            result = statement.get_result_set
+            metadata = result.get_meta_data
+            col_count = metadata.column_count
+            CSV.open(filename, 'wb', :force_quotes => true) do |csv|
+              csv << Array(1..col_count).map { |i| metadata.get_column_name(i) } # build the header
+              csv << Array(1..col_count).map { |i| result.get_string(i) } while result.next
+            end
+          end
+        end
+        GoodData.gd_logger.info("Realize SQL query: type=redshift status=finished duration=#{measure.real}")
+        filename
+      ensure
+        @connection.close unless @connection.nil?
+        @connection = nil
+      end
+
+      def connect
+        full_url = build_url(@url, @database)
+        GoodData.logger.info "Setting up connection to Redshift #{full_url}"
+
+        prop = java.util.Properties.new
+        if @authentication['basic']
+          prop.setProperty('UID', @authentication['basic']['userName'])
+          prop.setProperty('PWD', @authentication['basic']['password'])
+        else
+          prop.setProperty('AccessKeyID', @authentication['iam']['accessKeyId'])
+          prop.setProperty('SecretAccessKey', @authentication['iam']['secretAccessKey'])
+          prop.setProperty('DbUser', @authentication['iam']['dbUser'])
+        end
+
+        @connection = java.sql.DriverManager.getConnection(full_url, prop)
+      end
+
+      private
+
+      def build_url(url, database)
+        url_parts = url.split('?')
+        url_path = url_parts[0].chomp('/')
+        url_path += "/#{database}" if database && !url_path.end_with?("/#{database}")
+        url_parts.length > 1 ? url_path + '?' + url_parts[1] : url_path
+      end
+    end
+  end
+end
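
For reference, a hedged sketch of the options hash the RedshiftClient constructor above expects (JRuby only; the URL and credentials are placeholders, and either a 'basic' or an 'iam' block goes under 'authentication'):

options = {
  'redshift_client' => {
    'connection' => {
      'url'      => 'jdbc:redshift://example-cluster.eu-central-1.redshift.amazonaws.com:5439',
      'database' => 'dev',
      'schema'   => 'analytics',     # optional, defaults to 'public'
      'authentication' => {
        'basic' => { 'userName' => 'demo', 'password' => 'secret' }
        # or: 'iam' => { 'accessKeyId' => '...', 'secretAccessKey' => '...', 'dbUser' => '...' }
      }
    }
  }
}

client = GoodData::CloudResources::RedshiftClient.new(options)
csv_path = client.realize_query('SELECT id, name FROM customers', nil)
# => path of a generated "<random>_<timestamp>.csv" file holding the query result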

data/lib/gooddata/exceptions/invalid_env_error.rb (new file)
@@ -0,0 +1,15 @@
+# encoding: UTF-8
+#
+# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+module GoodData
+  class InvalidEnvError < RuntimeError
+    DEFAULT_MSG = 'Invalid environment: It must be JAVA platform'
+
+    def initialize(msg = DEFAULT_MSG)
+      super(msg)
+    end
+  end
+end

data/lib/gooddata/helpers/data_helper.rb
@@ -44,6 +44,11 @@ module GoodData
         realize_link
       when 's3'
         realize_s3(params)
+      when 'redshift'
+        raise GoodData::InvalidEnvError, "DataSource does not support type \"#{source}\" on the platform #{RUBY_PLATFORM}" unless RUBY_PLATFORM =~ /java/
+
+        require_relative '../cloud_resources/cloud_resources'
+        realize_cloud_resource(source, params)
       else
         raise "DataSource does not support type \"#{source}\""
       end
@@ -55,6 +60,11 @@ module GoodData
 
      private
 
+      def realize_cloud_resource(type, params)
+        cloud_resource_client = GoodData::CloudResources::CloudResourceFactory.create(type, params)
+        cloud_resource_client.realize_query(@options[:query], params)
+      end
+
      def realize_query(params)
        query = DataSource.interpolate_sql_params(@options[:query], params)
        dwh = params['ads_client'] || params[:ads_client] || raise("Data Source needs a client to ads to be able to query the storage but 'ads_client' is empty.")
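
Taken together, a brick input source can now point at Redshift, but only on JRuby; on other platforms the new InvalidEnvError is raised before the cloud-resources code is even required. A rough sketch of the call path, assuming the usual type/query keys of GoodData::Helpers::DataSource (the query and connection values are placeholders):

params = {
  'redshift_client' => {
    'connection' => {
      'url' => 'jdbc:redshift://example-cluster:5439/dev',
      'authentication' => { 'basic' => { 'userName' => 'demo', 'password' => 'secret' } }
    }
  }
}

data_source = GoodData::Helpers::DataSource.new(type: 'redshift', query: 'SELECT login, client_id FROM users')
csv_path = data_source.realize(params)   # raises GoodData::InvalidEnvError outside JRuby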

data/lib/gooddata/helpers/global_helpers.rb
@@ -183,6 +183,10 @@ module GoodData
         end
       end
 
+      def deep_merge(source, target)
+        GoodData::Helpers::DeepMergeableHash[source].deep_merge(target)
+      end
+
       def undot(params)
         # for each key-value config given
         params.map do |k, v|
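
GoodData::Helpers.deep_merge replaces the Hash#deep_merge refinement from gooddata/extensions/hash (removed in the next file), so callers no longer need 'using HashExtensions'. Its intended behaviour, shown with made-up values, is a recursive merge in which nested hashes are combined rather than replaced:

defaults = { 'gd_encoded_params' => { 'login' => 'demo' }, 'organization' => 'acme' }
override = { 'gd_encoded_params' => { 'password' => 'secret' } }

GoodData::Helpers.deep_merge(defaults, override)
# => { 'gd_encoded_params' => { 'login' => 'demo', 'password' => 'secret' }, 'organization' => 'acme' }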

data/lib/gooddata/helpers/global_helpers_params.rb
@@ -3,10 +3,6 @@
 # LICENSE file in the root directory of this source tree.
 require 'active_support/core_ext/hash/slice'
 
-require 'gooddata/extensions/hash'
-
-using HashExtensions
-
 module GoodData
   module Helpers
     ENCODED_PARAMS_KEY = 'gd_encoded_params'
@@ -102,7 +98,8 @@ module GoodData
 
         params.delete(key)
         params.delete(hidden_key)
-        params = params.deep_merge(parsed_data_params).deep_merge(parsed_hidden_data_params)
+        params = GoodData::Helpers.deep_merge(params, parsed_data_params)
+        params = GoodData::Helpers.deep_merge(params, parsed_hidden_data_params)
 
         if options[:convert_pipe_delimited_params]
           convert_pipe_delimited_params = lambda do |args|
@@ -121,7 +118,7 @@ module GoodData
             end
 
             lines.reduce({}) do |a, e|
-              a.deep_merge(e)
+              GoodData::Helpers.deep_merge(a, e)
             end
           end
 
@@ -129,7 +126,7 @@ module GoodData
          params.delete_if do |k, _|
            k.include?('|')
          end
-          params = params.deep_merge(pipe_delimited_params)
+          params = GoodData::Helpers.deep_merge(params, pipe_delimited_params)
        end
 
        params

data/lib/gooddata/lcm/actions/collect_segment_clients.rb
@@ -65,8 +65,11 @@ module GoodData
              segment.segment_id
            )
          else
-            latest_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment.segment_id)
+            data_product = params.data_product
+            data_product_id = data_product.data_product_id
+            latest_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product_id, segment.segment_id)
          end
+          raise 'Release table has no data' unless latest_master
 
          latest_master = client.projects(latest_master[:master_project_id])
 

data/lib/gooddata/lcm/actions/collect_segments.rb
@@ -51,8 +51,7 @@ module GoodData
          begin
            project = segment.master_project
          rescue RestClient::BadRequest => e
-            params.gdc_logger.error "Failed to retrieve master project for segment #{segment.id}. Error: #{e}"
-            raise
+            raise "Failed to retrieve master project for segment #{segment.id}. Error: #{e}"
          end
 
          raise "Master project for segment #{segment.id} doesn't exist." unless project

data/lib/gooddata/lcm/actions/create_segment_masters.rb
@@ -71,7 +71,7 @@ module GoodData
          ads_output_stage_prefix = segment_in.ads_output_stage_prefix
 
          # Create master project Postgres
-          version = get_project_version(params, domain_name, segment_id) + 1
+          version = get_project_version(params, domain_name, data_product, segment_id) + 1
 
          master_name = segment_in.master_name.gsub('#{version}', version.to_s)
 
@@ -113,6 +113,7 @@ module GoodData
            status = 'modified'
          end
 
+          segment_in[:data_product_id] = data_product.data_product_id
          segment_in[:master_pid] = project.pid
          segment_in[:version] = version
          segment_in[:timestamp] = Time.now.utc.iso8601
@@ -150,7 +151,7 @@ module GoodData
          }
        end
 
-        def get_project_version(params, domain_name, segment_id)
+        def get_project_version(params, domain_name, data_product, segment_id)
          if params.ads_client
            current_master = GoodData::LCM2::Helpers.latest_master_project_from_ads(
              params.release_table_name,
@@ -158,7 +159,8 @@ module GoodData
              segment_id
            )
          else
-            current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment_id)
+            data_product_id = data_product.data_product_id # data_product was populated by CollectDataProduct action already
+            current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product_id, segment_id)
          end
          return 0 unless current_master
          current_master[:version].to_i
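
All of the release-table lookups in these actions now take a data product id in addition to the segment id. A hedged sketch of the NFS variant used above (the method name comes from the diff, the record keys are inferred from callers that read :master_project_id and :version, and the argument values are placeholders):

latest = GoodData::LCM2::Helpers.latest_master_project_from_nfs(
  'mycompany-domain',   # domain_name
  'default',            # data_product_id
  'basic_segment'       # segment_id
)
# => e.g. { master_project_id: 'pid-of-latest-master', version: '3', ... } or nil when the release table is empty

version = latest ? latest[:version].to_i : 0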

data/lib/gooddata/lcm/actions/synchronize_clients.rb
@@ -68,7 +68,7 @@ module GoodData
            segment.segment_id
          )
        else
-          current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, segment.segment_id)
+          current_master = GoodData::LCM2::Helpers.latest_master_project_from_nfs(domain_name, data_product.data_product_id, segment.segment_id)
        end
 
        # TODO: Check res.first.nil? || res.first[:master_project_id].nil?

data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb
@@ -80,8 +80,7 @@ module GoodData
        res = GoodData::Helpers.symbolize_keys(res)
 
        if res[:syncedResult][:errors]
-          params.gdc_logger.error "Error: #{res[:syncedResult][:errors].pretty_inspect}"
-          fail "Failed to sync processes/schedules for segment #{segment_id}"
+          fail "Failed to sync processes/schedules for segment #{segment_id}. Error: #{res[:syncedResult][:errors].pretty_inspect}"
        end
 
        if res[:syncedResult][:clients]

data/lib/gooddata/lcm/actions/synchronize_ldm.rb
@@ -44,6 +44,9 @@ module GoodData
 
        description 'Specifies how to synchronize LDM and resolve possible conflicts'
        param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'
+
+        description 'Enables handling of deprecated objects in the logical data model.'
+        param :include_deprecated, instance_of(Type::BooleanType), required: false, default: false
      end
 
      class << self
@@ -70,6 +73,7 @@ module GoodData
          results = []
          client = params.gdc_gd_client
          exclude_fact_rule = params.exclude_fact_rule.to_b
+          include_deprecated = params.include_deprecated.to_b
          from_pid = segment_info[:from]
          from = params.development_client.projects(from_pid) || fail("Invalid 'from' project specified - '#{from_pid}'")
 
@@ -79,9 +83,11 @@ module GoodData
          previous_master = segment_info[:previous_master]
          diff_against_master = %w(diff_against_master_with_fallback diff_against_master)
                                .include?(params[:synchronize_ldm].downcase)
+          GoodData.logger.info "Synchronize LDM mode: '#{params[:synchronize_ldm].downcase}'"
          if previous_master && diff_against_master
            maql_diff_params = [:includeGrain]
            maql_diff_params << :excludeFactRule if exclude_fact_rule
+            maql_diff_params << :includeDeprecated if include_deprecated
            maql_diff = previous_master.maql_diff(blueprint: blueprint, params: maql_diff_params)
          end
 
@@ -96,7 +102,8 @@ module GoodData
              update_preference: params[:update_preference],
              exclude_fact_rule: exclude_fact_rule,
              execute_ca_scripts: false,
-              maql_diff: maql_diff
+              maql_diff: maql_diff,
+              include_deprecated: include_deprecated
            )
          rescue MaqlExecutionError => e
            GoodData.logger.info("Applying MAQL to project #{to_project.title} - #{pid} failed. Reason: #{e}")
@@ -106,7 +113,8 @@ module GoodData
              blueprint,
              update_preference: params[:update_preference],
              exclude_fact_rule: exclude_fact_rule,
-              execute_ca_scripts: false
+              execute_ca_scripts: false,
+              include_deprecated: include_deprecated
            )
          end
 
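
The new include_deprecated flag is an ordinary brick parameter: when truthy it adds :includeDeprecated to the MAQL diff request and is forwarded to update_from_blueprint. A minimal excerpt of release-brick parameters turning it on (all other required parameters omitted):

params = {
  'synchronize_ldm'    => 'diff_against_master_with_fallback',
  'include_deprecated' => 'true',    # the action calls .to_b, so 'true' and true both work
  'exclude_fact_rule'  => 'false'
  # ... plus the usual segment, client and connection parameters
}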

data/lib/gooddata/lcm/actions/synchronize_user_filters.rb
@@ -121,6 +121,7 @@ module GoodData
          users_brick_input: params.users_brick_users
        }
        all_clients = domain.clients(:all, data_product).to_a
+        GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size}")
 
        GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
        case mode
@@ -131,6 +132,8 @@ module GoodData
            filter = UserBricksHelper.resolve_client_id(domain, project, params.data_product)
          end
          user_filters = user_filters.select { |f| f[:pid] == filter } if filter
+
+          GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size}")
          sync_user_filters(project, user_filters, run_params, symbolized_config)
        when 'sync_multiple_projects_based_on_pid', 'sync_multiple_projects_based_on_custom_id'
          users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
@@ -144,6 +147,8 @@ module GoodData
            elsif mode == 'sync_multiple_projects_based_on_pid'
              current_project = client.projects(id)
            end
+
+            GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size}")
            sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
          end
        when 'sync_domain_client_workspaces'
@@ -170,6 +175,8 @@ module GoodData
            fail "Client #{client_id} does not have project." unless current_project
 
            working_client_ids << client_id
+
+            GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size}")
            partial_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
            results.concat(partial_results[:results])
          end
@@ -182,6 +189,8 @@ module GoodData
            current_project = c.project
            users = users_by_project[c.client_id]
            params.gdc_logger.info "Delete all filters in project #{current_project.pid} of client #{c.client_id}"
+
+            GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id}")
            current_results = sync_user_filters(current_project, [], run_params.merge(users_brick_input: users), symbolized_config)
 
            results.concat(current_results[:results])
@@ -214,10 +223,21 @@ module GoodData
        multiple_projects_column = params.multiple_projects_column
        data_source = GoodData::Helpers::DataSource.new(params.input_source)
 
-        without_check(PARAMS, params) do
-          CSV.foreach(File.open(data_source.realize(params), 'r:UTF-8'), headers: csv_with_headers, return_headers: false, encoding: 'utf-8') do |row|
+        tmp = without_check(PARAMS, params) do
+          File.open(data_source.realize(params), 'r:UTF-8')
+        end
+
+        begin
+          GoodData.logger.info('Start reading data')
+          row_count = 0
+          CSV.foreach(tmp, headers: csv_with_headers, return_headers: false, encoding: 'utf-8') do |row|
            filters << row.to_hash.merge(pid: row[multiple_projects_column])
+            row_count += 1
+            GoodData.logger.info("Read #{row_count} rows") if (row_count % 50_000).zero?
          end
+          GoodData.logger.info("Done reading data, total #{row_count} rows")
+        rescue Exception => e # rubocop:disable RescueException
+          fail "There was an error during loading data. Message: #{e.message}. Error: #{e}"
        end
 
        if filters.empty? && %w(sync_multiple_projects_based_on_pid sync_multiple_projects_based_on_custom_id).include?(params.sync_mode)
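
The reworked loader above realizes the input source once, streams it with CSV.foreach, logs progress every 50,000 rows, and wraps read errors in a descriptive failure. For orientation, a made-up example of the kind of input it expects; the column named by multiple_projects_column (here client_id) decides which project or client each filter row belongs to:

params = {
  'sync_mode'                => 'sync_domain_client_workspaces',
  'multiple_projects_column' => 'client_id'
  # ... plus input_source and the usual connection parameters
}

# Example rows of the realized CSV -- one user-filter definition per row:
#   login,client_id,region
#   john@example.com,client_1,East
#   jane@example.com,client_2,West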