gooddata 2.1.9-java → 2.1.14-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (47) hide show
  1. checksums.yaml +4 -4
  2. data/.gdc-ii-config.yaml +1 -1
  3. data/.rubocop.yml +1 -0
  4. data/.travis.yml +1 -3
  5. data/CHANGELOG.md +49 -0
  6. data/Dockerfile +17 -7
  7. data/README.md +17 -0
  8. data/SDK_VERSION +1 -1
  9. data/VERSION +1 -1
  10. data/bin/run_brick.rb +3 -0
  11. data/bin/test_projects_cleanup.rb +6 -2
  12. data/ci/bigquery/pom.xml +54 -0
  13. data/ci/redshift/pom.xml +73 -0
  14. data/ci/snowflake/pom.xml +57 -0
  15. data/dev-gooddata-sso.pub.encrypted +40 -40
  16. data/gdc_fossa_lcm.yaml +2 -0
  17. data/gdc_fossa_ruby_sdk.yaml +4 -0
  18. data/gooddata.gemspec +3 -3
  19. data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
  20. data/k8s/charts/lcm-bricks/templates/prometheus/alertingRules.yaml +22 -12
  21. data/lcm.rake +10 -6
  22. data/lib/gooddata/cloud_resources/bigquery/bigquery_client.rb +86 -0
  23. data/lib/gooddata/cloud_resources/bigquery/drivers/.gitkeepme +0 -0
  24. data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +3 -2
  25. data/lib/gooddata/cloud_resources/snowflake/drivers/.gitkeepme +0 -0
  26. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +84 -0
  27. data/lib/gooddata/helpers/data_helper.rb +1 -1
  28. data/lib/gooddata/helpers/data_source_helpers.rb +47 -0
  29. data/lib/gooddata/helpers/global_helpers_params.rb +2 -2
  30. data/lib/gooddata/lcm/actions/collect_clients.rb +6 -6
  31. data/lib/gooddata/lcm/actions/collect_dynamic_schedule_params.rb +6 -6
  32. data/lib/gooddata/lcm/actions/collect_tagged_objects.rb +2 -1
  33. data/lib/gooddata/lcm/actions/collect_users_brick_users.rb +7 -6
  34. data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +116 -0
  35. data/lib/gooddata/lcm/actions/set_master_project.rb +76 -0
  36. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +10 -1
  37. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +2 -2
  38. data/lib/gooddata/lcm/actions/synchronize_users.rb +31 -30
  39. data/lib/gooddata/lcm/lcm2.rb +22 -1
  40. data/lib/gooddata/models/domain.rb +17 -15
  41. data/lib/gooddata/models/from_wire.rb +1 -0
  42. data/lib/gooddata/models/metadata/scheduled_mail.rb +1 -1
  43. data/lib/gooddata/models/process.rb +11 -3
  44. data/lib/gooddata/models/project.rb +118 -29
  45. data/rubydev_public.gpg.encrypted +51 -51
  46. data/rubydev_secret_keys.gpg.encrypted +109 -109
  47. metadata +22 -10
@@ -0,0 +1,2 @@
1
+ ---
2
+ fossa_project: "gooddata-ruby-lcm"
@@ -0,0 +1,4 @@
1
+ ---
2
+ fossa_project: "gooddata-ruby-sdk"
3
+ ignored_paths:
4
+ - 'ci/.*'
@@ -49,15 +49,15 @@ Gem::Specification.new do |s|
49
49
  s.add_development_dependency 'pronto', '~> 0.10' if RUBY_PLATFORM != 'java'
50
50
  s.add_development_dependency 'pronto-rubocop', '~> 0.9' if RUBY_PLATFORM != 'java'
51
51
  s.add_development_dependency 'pronto-reek', '~> 0.9' if RUBY_PLATFORM != 'java'
52
- s.add_development_dependency 'vcr'
52
+ s.add_development_dependency 'vcr', '5.0.0'
53
53
  s.add_development_dependency 'hashdiff', '~> 0.4'
54
54
 
55
55
  s.add_development_dependency 'sqlite3' if RUBY_PLATFORM != 'java'
56
56
 
57
57
  if RUBY_VERSION >= '2.5'
58
- s.add_dependency 'activesupport', '> 4.2.9', '< 6.1'
58
+ s.add_dependency 'activesupport', '>= 6.0.3.1', '< 6.1'
59
59
  else
60
- s.add_dependency 'activesupport', '> 4.2.9', '< 6.0'
60
+ s.add_dependency 'activesupport', '>= 5.2.4.3', '< 6.0'
61
61
  end
62
62
 
63
63
  s.add_dependency 'aws-sdk-s3', '~> 1.16'
@@ -1,4 +1,4 @@
1
1
  apiVersion: v1
2
2
  name: lcm-bricks
3
3
  description: LCM Bricks
4
- version: 2.0.1
4
+ version: 2.0.3
@@ -20,7 +20,7 @@ data:
20
20
  expr: container_pod:lcm_pod_container_status_restarts:increase10m >= 1
21
21
  labels:
22
22
  severity: warning
23
- team: lcm # switch to msf in production
23
+ team: lcm
24
24
  cluster_id: {{ .Values.clusterId }}
25
25
  annotations:
26
26
  description: "There is more than 0 restarts of {{`{{ $labels.pod }}`}} pod in the last 10 minutes"
@@ -28,8 +28,8 @@ data:
28
28
  - alert: "[LCM] Pod has too many restarts on cluster={{ .Values.clusterId }}"
29
29
  expr: container_pod:lcm_pod_container_status_restarts:increase10m >= 2
30
30
  labels:
31
- severity: critical
32
- team: lcm # switch to msf in production
31
+ severity: warning
32
+ team: lcm
33
33
  cluster_id: {{ .Values.clusterId }}
34
34
  annotations:
35
35
  description: "There is more than 1 restart of {{`{{ $labels.pod }}`}} pod in the last 10 minutes"
@@ -40,7 +40,7 @@ data:
40
40
  expr: container_pod:lcm_pod_container_status_oomkilled:increase10m >= 1
41
41
  labels:
42
42
  severity: warning
43
- team: lcm # switch to msf in production
43
+ team: lcm
44
44
  cluster_id: {{ .Values.clusterId }}
45
45
  annotations:
46
46
  description: "{{`{{ $labels.pod }}`}} was OOMKilled in the last 30 minutes. Investigate and/or increase memoryRequest or memoryLimit."
@@ -48,8 +48,8 @@ data:
48
48
  - alert: "[LCM] OOMKill occurred on cluster={{ .Values.clusterId }}"
49
49
  expr: container_pod:lcm_pod_container_status_oomkilled:increase10m >= 2
50
50
  labels:
51
- severity: critical
52
- team: lcm # switch to msf in production
51
+ severity: warning
52
+ team: lcm
53
53
  cluster_id: {{ .Values.clusterId }}
54
54
  annotations:
55
55
  description: "{{`{{ $labels.pod }}`}} was OOMKilled in the last 10 minutes. Investigate and/or increase memoryRequest or memoryLimit."
@@ -58,8 +58,8 @@ data:
58
58
  expr: rate(container_cpu_cfs_throttled_seconds_total{namespace='{{ .Release.Namespace }}'}[1m]) > 1
59
59
  for: 5m
60
60
  labels:
61
- severity: critical
62
- team: lcm # switch to msf in production
61
+ severity: warning
62
+ team: lcm
63
63
  cluster_id: {{ .Values.clusterId }}
64
64
  annotations:
65
65
  description: "{{`{{ $labels.pod_name }}`}} container is being throttled and probably hit CPU limit. Investigate root cause and increase limit and/or number of replicas if necessary."
@@ -68,8 +68,8 @@ data:
68
68
  expr: rate(jvm_gc_pause_seconds_sum{kubernetes_namespace='{{ .Release.Namespace }}'}[1m]) > 1
69
69
  for: 5m
70
70
  labels:
71
- severity: critical
72
- team: lcm # switch to msf in production
71
+ severity: warning
72
+ team: lcm
73
73
  cluster_id: {{ .Values.clusterId }}
74
74
  annotations:
75
75
  description: "{{`{{ $labels.kubernetes_pod_name }}`}} container is spending too much time in pause garbage collector. Investigate root cause and increase heap size and/or number of replicas if necessary."
@@ -77,9 +77,19 @@ data:
77
77
  - alert: "[LCM] there is more than 100 jobs on cluster={{ .Values.clusterId }}"
78
78
  expr: count(kube_job_info{namespace="lcm"}) > 100
79
79
  labels:
80
- severity: critical
81
- team: lcm # switch to msf in production
80
+ severity: warning
81
+ team: lcm
82
82
  cluster_id: {{ .Values.clusterId }}
83
83
  annotations:
84
84
  description: "There is more than 100 jobs in LCM namespace. They are likely not deleted."
85
85
  summary: "There is more than 100 jobs in LCM namespace."
86
+ - alert: "[LCM] Resource quotas hit CPU limit on cluster={{ .Values.clusterId }}"
87
+ expr: kube_resourcequota{namespace='{{ .Release.Namespace }}',resource="limits.cpu",type="hard"} - ignoring(type) kube_resourcequota{namespace='{{ .Release.Namespace }}',resource="limits.cpu",type="used"} == 0
88
+ labels:
89
+ severity: warning
90
+ team: lcm
91
+ cluster_id: {{ .Values.clusterId }}
92
+ annotations:
93
+ description: "We are hitting CPU limit in LCM namespace."
94
+ summary: "We are hitting CPU limit in LCM namespace."
95
+
data/lcm.rake CHANGED
@@ -118,7 +118,7 @@ end
118
118
  namespace :docker do
119
119
  desc 'Build Docker image'
120
120
  task :build do
121
- Rake::Task["maven:build_redshift"].invoke
121
+ Rake::Task["maven:build_dependencies"].invoke
122
122
  system('docker build -f Dockerfile.jruby -t gooddata/appstore .')
123
123
  end
124
124
 
@@ -129,11 +129,15 @@ namespace :docker do
129
129
  end
130
130
 
131
131
  namespace :maven do
132
- task :build_redshift do
133
- system("cp -rf spec/lcm/redshift_driver_pom.xml tmp/pom.xml")
134
- system('mvn -f tmp/pom.xml clean install -P binary-packaging')
135
- system('cp -rf tmp/target/*.jar lib/gooddata/cloud_resources/redshift/drivers/')
136
- system('rm -rf lib/gooddata/cloud_resources/redshift/drivers/lcm-redshift-driver*.jar')
132
+ task :build_dependencies do
133
+ system('mvn -f ci/snowflake/pom.xml clean install -P binary-packaging')
134
+ system('cp -rf ci/snowflake/target/*.jar lib/gooddata/cloud_resources/snowflake/drivers/')
135
+
136
+ system('mvn -f ci/bigquery/pom.xml clean install -P binary-packaging')
137
+ system('cp -rf ci/bigquery/target/*.jar lib/gooddata/cloud_resources/bigquery/drivers/')
138
+
139
+ system('mvn -f ci/redshift/pom.xml clean install -P binary-packaging')
140
+ system('cp -rf ci/redshift/target/*.jar lib/gooddata/cloud_resources/redshift/drivers/')
137
141
  end
138
142
  end
139
143
 
# encoding: UTF-8
# frozen_string_literal: true
#
# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

require 'securerandom'
require 'java'
require 'pathname'
require_relative '../cloud_resource_client'

base = Pathname(__FILE__).dirname.expand_path
Dir.glob(base + 'drivers/*.jar').each do |file|
  # Dir.glob yields absolute paths, so the original
  # `file.start_with?('lcm-bigquery-driver')` could never match and the
  # packaging jar was loaded too. Match on the basename instead.
  require file unless File.basename(file).start_with?('lcm-bigquery-driver')
end

java_import 'com.google.auth.oauth2.ServiceAccountCredentials'
java_import 'com.google.cloud.bigquery.BigQuery'
java_import 'com.google.cloud.bigquery.BigQueryOptions'
java_import 'com.google.cloud.bigquery.FieldList'
java_import 'com.google.cloud.bigquery.FieldValue'
java_import 'com.google.cloud.bigquery.FieldValueList'
java_import 'com.google.cloud.bigquery.QueryJobConfiguration'
java_import 'com.google.cloud.bigquery.TableResult'
java_import 'org.apache.commons.text.StringEscapeUtils'

module GoodData
  module CloudResources
    # Cloud-resource client that runs SQL against Google BigQuery (via the
    # Java client library under JRuby) and dumps the result into a local CSV.
    class BigQueryClient < CloudResourceClient
      class << self
        # @param type [String] data source type identifier
        # @return [Boolean] true when this client handles the given type
        def accept?(type)
          type == 'bigquery'
        end
      end

      # @param options [Hash] expects options['bigquery_client']['connection']
      #   to be a Hash with 'project', optional 'schema' (default 'public')
      #   and 'authentication' (service-account credentials).
      # @raise [RuntimeError] when the client or connection info is missing
      def initialize(options = {})
        raise("Data Source needs a client to BigQuery to be able to query the storage but 'bigquery_client' is empty.") unless options['bigquery_client']

        connection = options['bigquery_client']['connection']
        raise('Missing connection info for BigQuery client') unless connection.is_a?(Hash)

        @project = connection['project']
        @schema = connection['schema'] || 'public'
        @authentication = connection['authentication']
      end

      # Executes the query and writes the full result set to a randomly
      # named CSV file in the working directory.
      #
      # @param query [String] SQL to execute
      # @param _params [Object] unused, kept for interface compatibility
      # @return [String] name of the CSV file with the results
      #   NOTE(review): when the query returns zero rows the CSV file is
      #   never created although its name is still returned — callers
      #   appear to depend on this, so the behavior is preserved.
      def realize_query(query, _params)
        GoodData.gd_logger.info("Realize SQL query: type=bigquery status=started")

        client = create_client
        filename = "#{SecureRandom.urlsafe_base64(6)}_#{Time.now.to_i}.csv"
        measure = Benchmark.measure do
          query_config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(@schema).build
          table_result = client.query(query_config)

          if table_result.getTotalRows.positive?
            result = table_result.iterateAll
            field_list = table_result.getSchema.getFields
            col_count = field_list.size
            CSV.open(filename, 'wb') do |csv|
              csv << Array(1..col_count).map { |i| field_list.get(i - 1).getName } # build the header
              result.each do |row|
                # getValue may be nil for NULL columns; &.to_s keeps them empty
                csv << Array(1..col_count).map { |i| row.get(i - 1).getValue&.to_s }
              end
            end
          end
        end
        GoodData.gd_logger.info("Realize SQL query: type=bigquery status=finished duration=#{measure.real}")
        filename
      end

      private

      # Builds an authenticated BigQuery service handle from the
      # service-account credentials supplied in the connection options.
      def create_client
        GoodData.logger.info "Setting up connection to BigQuery"
        client_email = @authentication['serviceAccount']['clientEmail']
        private_key = @authentication['serviceAccount']['privateKey']
        # The private key arrives JSON-escaped; unescape before parsing PKCS8.
        credentials = ServiceAccountCredentials.fromPkcs8(nil, client_email, StringEscapeUtils.unescapeJson(private_key), nil, nil)
        BigQueryOptions.newBuilder.setProjectId(@project).setCredentials(credentials).build.getService
      end
    end
  end
end
@@ -1,4 +1,5 @@
1
1
  # encoding: UTF-8
2
+ # frozen_string_literal: true
2
3
  #
3
4
  # Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
4
5
  # This source code is licensed under the BSD-style license found in the
@@ -57,9 +58,9 @@ module GoodData
57
58
  result = statement.get_result_set
58
59
  metadata = result.get_meta_data
59
60
  col_count = metadata.column_count
60
- CSV.open(filename, 'wb', :force_quotes => true) do |csv|
61
+ CSV.open(filename, 'wb') do |csv|
61
62
  csv << Array(1..col_count).map { |i| metadata.get_column_name(i) } # build the header
62
- csv << Array(1..col_count).map { |i| result.get_string(i) } while result.next
63
+ csv << Array(1..col_count).map { |i| result.get_string(i)&.to_s } while result.next
63
64
  end
64
65
  end
65
66
  end
# encoding: UTF-8
# frozen_string_literal: true
#
# Copyright (c) 2010-2019 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

require 'securerandom'
require 'java'
require 'pathname'
require_relative '../cloud_resource_client'

base = Pathname(__FILE__).dirname.expand_path
Dir.glob(base + 'drivers/*.jar').each do |file|
  # Dir.glob yields absolute paths, so the original
  # `file.start_with?('lcm-snowflake-driver')` could never match and the
  # packaging jar was loaded too. Match on the basename instead.
  require file unless File.basename(file).start_with?('lcm-snowflake-driver')
end

module GoodData
  module CloudResources
    # Cloud-resource client that runs SQL against Snowflake over JDBC
    # (under JRuby) and dumps the result into a local CSV file.
    class SnowflakeClient < CloudResourceClient
      class << self
        # @param type [String] data source type identifier
        # @return [Boolean] true when this client handles the given type
        def accept?(type)
          type == 'snowflake'
        end
      end

      # @param options [Hash] expects options['snowflake_client']['connection']
      #   to be a Hash with 'database', optional 'schema' (default 'public'),
      #   'warehouse', 'url' and 'authentication' (basic user/password).
      # @raise [RuntimeError] when the client or connection info is missing
      def initialize(options = {})
        raise("Data Source needs a client to Snowflake to be able to query the storage but 'snowflake_client' is empty.") unless options['snowflake_client']

        connection = options['snowflake_client']['connection']
        raise('Missing connection info for Snowflake client') unless connection.is_a?(Hash)

        @database = connection['database']
        @schema = connection['schema'] || 'public'
        @warehouse = connection['warehouse']
        @url = connection['url']
        @authentication = connection['authentication']

        # Touch the driver class so it registers itself with DriverManager.
        Java.net.snowflake.client.jdbc.SnowflakeDriver
      end

      # Executes the query and writes the full result set to a randomly
      # named CSV file in the working directory. The JDBC connection is
      # always closed, even when the query raises.
      #
      # @param query [String] SQL to execute
      # @param _params [Object] unused, kept for interface compatibility
      # @return [String] name of the CSV file with the results
      #   NOTE(review): when the statement yields no result set the CSV file
      #   is never created although its name is still returned — behavior
      #   preserved from the original.
      def realize_query(query, _params)
        GoodData.gd_logger.info("Realize SQL query: type=snowflake status=started")

        connect
        filename = "#{SecureRandom.urlsafe_base64(6)}_#{Time.now.to_i}.csv"
        measure = Benchmark.measure do
          statement = @connection.create_statement

          has_result = statement.execute(query)
          if has_result
            result = statement.get_result_set
            metadata = result.get_meta_data
            col_count = metadata.column_count
            CSV.open(filename, 'wb') do |csv|
              csv << Array(1..col_count).map { |i| metadata.get_column_name(i) } # build the header
              # get_string may be nil for NULL columns; &.to_s keeps them empty
              csv << Array(1..col_count).map { |i| result.get_string(i)&.to_s } while result.next
            end
          end
        end
        GoodData.gd_logger.info("Realize SQL query: type=snowflake status=finished duration=#{measure.real}")
        filename
      ensure
        @connection&.close
        @connection = nil
      end

      # Opens a JDBC connection to Snowflake using the connection info
      # captured in #initialize and stores it in @connection.
      def connect
        GoodData.logger.info "Setting up connection to Snowflake #{@url}"

        prop = java.util.Properties.new
        prop.setProperty('user', @authentication['basic']['userName'])
        prop.setProperty('password', @authentication['basic']['password'])
        prop.setProperty('schema', @schema)
        prop.setProperty('warehouse', @warehouse)
        prop.setProperty('db', @database)

        @connection = java.sql.DriverManager.getConnection(@url, prop)
      end
    end
  end
end
@@ -44,7 +44,7 @@ module GoodData
44
44
  realize_link
45
45
  when 's3'
46
46
  realize_s3(params)
47
- when 'redshift'
47
+ when 'redshift', 'snowflake', 'bigquery'
48
48
  raise GoodData::InvalidEnvError, "DataSource does not support type \"#{source}\" on the platform #{RUBY_PLATFORM}" unless RUBY_PLATFORM =~ /java/
49
49
 
50
50
  require_relative '../cloud_resources/cloud_resources'
# encoding: UTF-8
# frozen_string_literal: true
#
# Copyright (c) 2010-2020 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

module GoodData
  module Helpers
    class << self
      # Get a data source information from server by id
      #
      # @param data_source_id [String] The data source ID
      # @param client [Object] The Rest Client object
      # @return [Hash, nil] the data source document, or nil when the id is blank
      def get_data_source_by_id(data_source_id, client)
        unless data_source_id.blank?
          uri = "/gdc/dataload/dataSources/#{data_source_id}"
          client.get(uri)
        end
      end

      # Verify that a data source with the given alias exists in the domain
      # and has the expected type.
      #
      # @param ds_alias [Hash] expected data source, keys :alias and :type
      # @param client [Object] The Rest Client object
      # @return [String] Id of the data source or failed with the reason
      def verify_data_source_alias(ds_alias, client)
        domain = client.connection.server.url
        fail "The data source alias is empty, check your data source configuration." unless ds_alias

        uri = "/gdc/dataload/dataSources/internal/availableAlias?alias=#{ds_alias[:alias]}"
        res = client.get(uri)
        fail "Unable to get information about the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" unless res
        # 'available' == true means no existing data source uses this alias
        fail "Unable to find the #{ds_alias[:type]} Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" if res['availableAlias']['available']

        ds_type = res['availableAlias']['existingDataSource']['type']
        if ds_type && ds_type != ds_alias[:type]
          # Bug fix: expected/actual were swapped — ds_alias[:type] is what
          # the caller expects, ds_type is what the existing data source has.
          fail "Wrong Data Source type - the '#{ds_alias[:type]}' type is expected but the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}' has the '#{ds_type}' type"
        else
          res['availableAlias']['existingDataSource']['id']
        end
      end
    end
  end
end
@@ -242,7 +242,7 @@ module GoodData
242
242
 
243
243
  def resolve_reference_params(data_params, params)
244
244
  reference_values = []
245
- regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{(\w+)\}/)
245
+ regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{([^\n\{\}]+)\}/)
246
246
  resolve_reference = lambda do |v|
247
247
  if v.is_a? Hash
248
248
  Hash[
@@ -262,7 +262,7 @@ module GoodData
262
262
  data_params.is_a?(Hash) ? '\\' : '\\\\' # rubocop: disable Metrics/BlockNesting
263
263
  elsif match =~ /\\\$/
264
264
  '$'
265
- elsif match =~ /\$\{(\w+)\}/
265
+ elsif match =~ /\$\{([^\n\{\}]+)\}/
266
266
  val = params["#{$1}"]
267
267
  if val
268
268
  reference_values << val
@@ -67,11 +67,11 @@ module GoodData
67
67
  end
68
68
 
69
69
  def collect_clients(params, segment_names = nil)
70
- client_id_column = params.client_id_column || 'client_id'
71
- segment_id_column = params.segment_id_column || 'segment_id'
72
- project_id_column = params.project_id_column || 'project_id'
73
- project_title_column = params.project_title_column || 'project_title'
74
- project_token_column = params.project_token_column || 'project_token'
70
+ client_id_column = params.client_id_column&.downcase || 'client_id'
71
+ segment_id_column = params.segment_id_column&.downcase || 'segment_id'
72
+ project_id_column = params.project_id_column&.downcase || 'project_id'
73
+ project_title_column = params.project_title_column&.downcase || 'project_title'
74
+ project_token_column = params.project_token_column&.downcase || 'project_token'
75
75
  client = params.gdc_gd_client
76
76
 
77
77
  clients = []
@@ -82,7 +82,7 @@ module GoodData
82
82
  end
83
83
  GoodData.logger.debug("Input data: #{input_data.read}")
84
84
  GoodData.logger.debug("Segment names: #{segment_names}")
85
- CSV.foreach(input_data, :headers => true, :return_headers => false, encoding: 'utf-8') do |row|
85
+ CSV.foreach(input_data, :headers => true, :return_headers => false, :header_converters => :downcase, :encoding => 'utf-8') do |row|
86
86
  GoodData.logger.debug("Processing row: #{row}")
87
87
  segment_name = row[segment_id_column]
88
88
  GoodData.logger.debug("Segment name: #{segment_name}")
@@ -38,11 +38,11 @@ module GoodData
38
38
  def call(params)
39
39
  return [] unless params.dynamic_params
40
40
 
41
- schedule_title_column = params.schedule_title_column || 'schedule_title'
42
- client_id_column = params.client_id_column || 'client_id'
43
- param_name_column = params.param_name_column || 'param_name'
44
- param_value_column = params.param_value_column || 'param_value'
45
- param_secure_column = params.param_secure_column || 'param_secure'
41
+ schedule_title_column = params.schedule_title_column&.downcase || 'schedule_title'
42
+ client_id_column = params.client_id_column&.downcase || 'client_id'
43
+ param_name_column = params.param_name_column&.downcase || 'param_name'
44
+ param_value_column = params.param_value_column&.downcase || 'param_value'
45
+ param_secure_column = params.param_secure_column&.downcase || 'param_secure'
46
46
 
47
47
  encryption_key = params.dynamic_params_encryption_key || ''
48
48
  exist_encryption_key = encryption_key.blank? ? false : true
@@ -59,7 +59,7 @@ module GoodData
59
59
  schedule_hidden_params = {}
60
60
  exist_param_secure = false
61
61
 
62
- CSV.foreach(input_data, :headers => true, :return_headers => false, encoding: 'utf-8') do |row|
62
+ CSV.foreach(input_data, :headers => true, :return_headers => false, :header_converters => :downcase, :encoding => 'utf-8') do |row|
63
63
  is_param_secure = row[param_secure_column] == 'true'
64
64
  is_decrypt_secure_value = is_param_secure && exist_encryption_key ? true : false
65
65
  exist_param_secure = true if is_param_secure