gooddata 2.1.19-java → 2.2.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +5 -5
  2. data/.gdc-ii-config.yaml +1 -1
  3. data/.github/workflows/build.yml +66 -0
  4. data/.github/workflows/pre-merge.yml +72 -0
  5. data/CHANGELOG.md +38 -0
  6. data/Dockerfile +21 -14
  7. data/Dockerfile.jruby +1 -11
  8. data/README.md +1 -2
  9. data/SDK_VERSION +1 -1
  10. data/VERSION +1 -1
  11. data/ci/mssql/pom.xml +62 -0
  12. data/ci/mysql/pom.xml +57 -0
  13. data/ci/redshift/pom.xml +1 -1
  14. data/docker-compose.lcm.yml +0 -3
  15. data/gooddata.gemspec +2 -1
  16. data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
  17. data/lcm.rake +2 -8
  18. data/lib/gooddata/bricks/middleware/aws_middleware.rb +35 -9
  19. data/lib/gooddata/cloud_resources/blobstorage/blobstorage_client.rb +98 -0
  20. data/lib/gooddata/cloud_resources/mssql/drivers/.gitkeepme +0 -0
  21. data/lib/gooddata/cloud_resources/mssql/mssql_client.rb +122 -0
  22. data/lib/gooddata/cloud_resources/mysql/drivers/.gitkeepme +0 -0
  23. data/lib/gooddata/cloud_resources/mysql/mysql_client.rb +111 -0
  24. data/lib/gooddata/cloud_resources/postgresql/postgresql_client.rb +0 -1
  25. data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +18 -1
  26. data/lib/gooddata/helpers/data_helper.rb +9 -4
  27. data/lib/gooddata/lcm/actions/collect_meta.rb +3 -1
  28. data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +3 -2
  29. data/lib/gooddata/lcm/actions/synchronize_clients.rb +56 -7
  30. data/lib/gooddata/lcm/actions/synchronize_dataset_mappings.rb +64 -0
  31. data/lib/gooddata/lcm/actions/synchronize_ldm.rb +19 -8
  32. data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +12 -9
  33. data/lib/gooddata/lcm/actions/update_metric_formats.rb +185 -0
  34. data/lib/gooddata/lcm/data/delete_from_lcm_release.sql.erb +5 -0
  35. data/lib/gooddata/lcm/helpers/release_table_helper.rb +42 -8
  36. data/lib/gooddata/lcm/lcm2.rb +5 -0
  37. data/lib/gooddata/mixins/md_object_query.rb +1 -0
  38. data/lib/gooddata/models/data_source.rb +5 -1
  39. data/lib/gooddata/models/dataset_mapping.rb +36 -0
  40. data/lib/gooddata/models/metadata/label.rb +26 -27
  41. data/lib/gooddata/models/project.rb +34 -9
  42. data/lib/gooddata/models/schedule.rb +13 -1
  43. data/lib/gooddata/models/user_filters/user_filter_builder.rb +58 -53
  44. data/lib/gooddata/rest/phmap.rb +1 -0
  45. metadata +44 -18
  46. data/lib/gooddata/bricks/middleware/bulk_salesforce_middleware.rb +0 -37
data/lib/gooddata/lcm/actions/synchronize_user_filters.rb
@@ -124,6 +124,7 @@ module GoodData
       GoodData.gd_logger.info("Synchronizing in mode=#{mode}, number_of_clients=#{all_clients.size}, data_rows=#{user_filters.size}")

       GoodData.logger.info("Synchronizing in mode \"#{mode}\"")
+      results = []
       case mode
       when 'sync_project', 'sync_one_project_based_on_pid', 'sync_one_project_based_on_custom_id'
         if mode == 'sync_one_project_based_on_pid'
@@ -134,7 +135,9 @@ module GoodData
         user_filters = user_filters.select { |f| f[:pid] == filter } if filter

         GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{project.pid}, data_rows=#{user_filters.size}")
-        sync_user_filters(project, user_filters, run_params, symbolized_config)
+        current_results = sync_user_filters(project, user_filters, run_params, symbolized_config)
+
+        results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
       when 'sync_multiple_projects_based_on_pid', 'sync_multiple_projects_based_on_custom_id'
         users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
         user_filters.group_by { |u| u[:pid] }.flat_map.pmap do |id, new_filters|
@@ -149,7 +152,9 @@ module GoodData
           end

           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, project_id=#{id}, data_rows=#{new_filters.size}")
-          sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+          current_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
+
+          results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
         end
       when 'sync_domain_client_workspaces'
         domain_clients = all_clients
@@ -161,7 +166,6 @@ module GoodData
         working_client_ids = []

         users_by_project = run_params[:users_brick_input].group_by { |u| u[:pid] }
-        results = []
         user_filters.group_by { |u| u[multiple_projects_column] }.flat_map.pmap do |client_id, new_filters|
           users = users_by_project[client_id]
           fail "Client id cannot be empty" if client_id.blank?
@@ -182,7 +186,7 @@ module GoodData

           GoodData.gd_logger.info("Synchronizing in mode=#{mode}, client_id=#{client_id}, data_rows=#{new_filters.size}")
           partial_results = sync_user_filters(current_project, new_filters, run_params.merge(users_brick_input: users), symbolized_config)
-          results.concat(partial_results[:results])
+          results.concat(partial_results[:results]) unless partial_results.nil? || partial_results[:results].empty?
         end

         unless run_params[:do_not_touch_filters_that_are_not_mentioned]
@@ -197,17 +201,16 @@ module GoodData
             GoodData.gd_logger.info("Delete all filters in project_id=#{current_project.pid}, client_id=#{c.client_id}")
             current_results = sync_user_filters(current_project, [], run_params.merge(users_brick_input: users), symbolized_config)

-            results.concat(current_results[:results])
+            results.concat(current_results[:results]) unless current_results.nil? || current_results[:results].empty?
           rescue StandardError => e
             params.gdc_logger.error "Failed to clear filters of #{c.client_id} due to: #{e.inspect}"
           end
         end
       end
-
-      {
-        results: results
-      }
       end
+      {
+        results: results
+      }
     end

     def sync_user_filters(project, filters, params, filters_config)
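Every mode now funnels its partial results into a single `results` array that is returned once at the end of `call`, guarding against `nil` or empty returns from `sync_user_filters`. A minimal sketch of the guard pattern, with `fetch_batch` and `batches` as hypothetical stand-ins for the brick's own helpers:

    # Aggregate optional per-batch results without failing on nil returns.
    results = []
    batches.each do |batch|
      current = fetch_batch(batch) # may return nil or { results: [...] }
      results.concat(current[:results]) unless current.nil? || current[:results].empty?
    end
    { results: results }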
data/lib/gooddata/lcm/actions/update_metric_formats.rb
@@ -0,0 +1,185 @@
+# encoding: UTF-8
+# frozen_string_literal: true
+# Copyright (c) 2010-2021 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+require_relative 'base_action'
+
+module GoodData
+  module LCM2
+    class UpdateMetricFormats < BaseAction
+      DESCRIPTION = 'Localize Metric Formats'
+
+      PARAMS = define_params(self) do
+        description 'Synchronization Info'
+        param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
+
+        description 'Client Used for Connecting to GD'
+        param :gdc_gd_client, instance_of(Type::GdClientType), required: true
+
+        description 'Organization Name'
+        param :organization, instance_of(Type::StringType), required: false
+
+        description 'DataProduct to manage'
+        param :data_product, instance_of(Type::GDDataProductType), required: false
+
+        description 'Logger'
+        param :gdc_logger, instance_of(Type::GdLogger), required: true
+
+        description 'ADS Client'
+        param :ads_client, instance_of(Type::AdsClientType), required: false
+
+        description 'Input Source'
+        param :input_source, instance_of(Type::HashType), required: false
+
+        description 'Localization query'
+        param :localization_query, instance_of(Type::StringType), required: false
+      end
+
+      RESULT_HEADER = %i[action ok_clients error_clients]
+
+      class << self
+        def load_metric_data(params)
+          if params&.dig(:input_source, :metric_format) && params[:input_source][:metric_format].present?
+            metric_input_source = validate_input_source(params[:input_source])
+          else
+            return nil
+          end
+
+          metric_data_source = GoodData::Helpers::DataSource.new(metric_input_source)
+          begin
+            temp_csv = without_check(PARAMS, params) do
+              File.open(metric_data_source.realize(params), 'r:UTF-8')
+            end
+          rescue StandardError => e
+            GoodData.logger.warn("Unable to get metric input source, skip updating metric formats. Error: #{e.message} - #{e}")
+            return nil
+          end
+
+          metrics_hash = GoodData::Helpers::Csv.read_as_hash temp_csv
+          return nil if metrics_hash.empty?
+
+          expected_keys = %w[tag client_id format]
+          unless expected_keys.map(&:to_sym).all? { |s| metrics_hash.first.key? s }
+            GoodData.logger.warn("The input metric data is incorrect, expecting the following fields: #{expected_keys}")
+            return nil
+          end
+          metrics_hash
+        end
+
+        def validate_input_source(input_source)
+          type = input_source[:type] if input_source&.dig(:type)
+          metric_format = input_source[:metric_format]
+          raise "Incorrect configuration: 'type' of 'input_source' is required" if type.blank?
+
+          modified_input_source = input_source
+          case type
+          when 'ads', 'redshift', 'snowflake', 'bigquery', 'postgresql', 'mssql', 'mysql'
+            if metric_format[:query].blank?
+              GoodData.logger.warn("The metric input_source '#{type}' is missing property 'query'")
+              return nil
+            end
+
+            modified_input_source[:query] = metric_format[:query]
+            return modified_input_source
+          when 's3'
+            if metric_format[:file].blank?
+              GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+              return nil
+            end
+
+            if modified_input_source.key?(:key)
+              modified_input_source[:key] = metric_format[:file]
+            else
+              modified_input_source[:file] = metric_format[:file]
+            end
+            return modified_input_source
+          when 'blobStorage'
+            if metric_format[:file].blank?
+              GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+              return nil
+            end
+
+            modified_input_source[:file] = metric_format[:file]
+            return modified_input_source
+          when 'staging'
+            if metric_format[:file].blank?
+              GoodData.logger.warn("The metric input_source '#{type}' is missing property 'file'")
+              return nil
+            end
+
+            modified_input_source[:path] = metric_format[:file]
+            return modified_input_source
+          when 'web'
+            if metric_format[:url].blank?
+              GoodData.logger.warn("The metric input_source '#{type}' is missing property 'url'")
+              return nil
+            end
+
+            modified_input_source[:url] = metric_format[:url]
+            return modified_input_source
+          else
+            return nil
+          end
+        end
+
+        def get_clients_metrics(metric_data)
+          return {} if metric_data.nil?
+
+          metric_groups = {}
+          clients = metric_data.map { |row| row[:client_id] }.uniq
+          clients.each do |client|
+            next if client.blank?
+
+            formats = {}
+            metric_data.select { |row| row[:client_id] == client && row[:tag].present? && row[:format].present? }.each { |row| formats[row[:tag]] = row[:format] }
+            metric_groups[client.to_s] ||= formats
+          end
+          metric_groups
+        end
+
+        def call(params)
+          data = load_metric_data(params)
+          result = []
+          return result if data.nil?
+
+          metric_group = get_clients_metrics(data)
+          return result if metric_group.empty?
+
+          GoodData.logger.debug("Clients have metrics which will be modified: #{metric_group.keys}")
+          updated_clients = params.synchronize.map { |segment| segment.to.map { |client| client[:client_id] } }.flatten.uniq
+          GoodData.logger.debug("Updating clients: #{updated_clients}")
+          data_product = params.data_product
+          data_product_clients = data_product.clients
+          number_client_ok = 0
+          number_client_error = 0
+          metric_group.each do |client_id, formats|
+            next unless updated_clients.include?(client_id)
+
+            client = data_product_clients.find { |c| c.id == client_id }
+            begin
+              GoodData.logger.info("Start updating metric format for client: '#{client_id}'")
+              metrics = client.project.metrics.to_a
+              formats.each do |tag, format|
+                next if tag.blank? || format.blank?
+
+                metrics_to_be_updated = metrics.select { |metric| metric.tags.include?(tag) }
+                metrics_to_be_updated.each do |metric|
+                  metric.format = format
+                  metric.save
+                end
+              end
+              number_client_ok += 1
+              GoodData.logger.info("Finished updating metric format for client: '#{client_id}'")
+            rescue StandardError => e
+              number_client_error += 1
+              GoodData.logger.warn("Failed to update metric format for client: '#{client_id}'. Error: #{e.message} - #{e}")
+            end
+          end
+          [{ :action => 'Update metric format', :ok_clients => number_client_ok, :error_clients => number_client_error }]
+        end
+      end
+    end
+  end
+end
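The new action consumes a table with `tag`, `client_id` and `format` columns and rewrites the format of every metric carrying a given tag in each matched client's project. A hedged illustration of the data flow; the client IDs, tag and formats below are invented:

    # Rows as load_metric_data returns them (after Csv.read_as_hash):
    metric_data = [
      { tag: 'money', client_id: 'client_a', format: '#,##0.00 USD' },
      { tag: 'money', client_id: 'client_b', format: '#,##0.00 EUR' }
    ]
    # get_clients_metrics(metric_data)
    # => { "client_a" => { "money" => "#,##0.00 USD" },
    #      "client_b" => { "money" => "#,##0.00 EUR" } }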
data/lib/gooddata/lcm/data/delete_from_lcm_release.sql.erb
@@ -0,0 +1,5 @@
+DELETE FROM "<%= table_name || 'LCM_RELEASE' %>"
+WHERE
+  segment_id = '<%= segment_id %>'
+  AND master_project_id IN (<%= master_project_ids %>)
+;
data/lib/gooddata/lcm/helpers/release_table_helper.rb
@@ -12,6 +12,21 @@ module GoodData
     class << self
       def latest_master_project_from_ads(release_table_name, ads_client, segment_id)
+        sorted = get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
+        sorted.last
+      end
+
+      def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+        file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+        sorted = get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
+        latest_master_project = sorted.last
+
+        version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
+        GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
+        latest_master_project
+      end
+
+      def get_master_project_list_from_ads(release_table_name, ads_client, segment_id)
         replacements = {
           table_name: release_table_name || DEFAULT_TABLE_NAME,
           segment_id: segment_id
@@ -22,18 +37,27 @@ module GoodData

         res = ads_client.execute_select(query)
         sorted = res.sort_by { |row| row[:version] }
-        sorted.last
+        sorted
       end

-      def latest_master_project_from_nfs(domain_id, data_product_id, segment_id)
+      def delete_master_project_from_ads(release_table_name, ads_client, segment_id, removal_master_project_ids)
+        replacements = {
+          table_name: release_table_name || DEFAULT_TABLE_NAME,
+          segment_id: segment_id,
+          master_project_ids: removal_master_project_ids.map { |x| "'#{x}'" } * ', '
+        }
+
+        path = File.expand_path('../data/delete_from_lcm_release.sql.erb', __dir__)
+        query = GoodData::Helpers::ErbHelper.template_file(path, replacements)
+
+        ads_client.execute(query)
+      end
+
+      def get_master_project_list_from_nfs(domain_id, data_product_id, segment_id)
         file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
         data = GoodData::Helpers::Csv.read_as_hash(file_path)
-        latest_master_project = data.sort_by { |master| master[:version] }
-                                    .reverse.first
-
-        version_info = latest_master_project ? "master_pid=#{latest_master_project[:master_project_id]} version=#{latest_master_project[:version]}" : ""
-        GoodData.gd_logger.info "Getting latest master project: file=#{file_path} domain=#{domain_id} data_product=#{data_product_id} segment=#{segment_id} #{version_info}"
-        latest_master_project
+        sorted = data.sort_by { |master| master[:version] }
+        sorted
       end

       def update_latest_master_to_nfs(domain_id, data_product_id, segment_id, master_pid, version)
@@ -46,6 +70,16 @@ module GoodData
         )
       end

+      def update_master_project_to_nfs(domain_id, data_product_id, segment_id, data)
+        file_path = path_to_release_table_file(domain_id, data_product_id, segment_id)
+        FileUtils.mkpath(file_path.split('/')[0...-1].join('/'))
+        CSV.open(file_path, 'w', write_headers: true, headers: data.first.keys) do |csv|
+          data.each do |r|
+            csv << r.values
+          end
+        end
+      end
+
       def path_to_release_table_file(domain_id, data_prod_id, segment_id)
         nsf_directory = ENV['RELEASE_TABLE_NFS_DIRECTORY'] || DEFAULT_NFS_DIRECTORY
         [nsf_directory, domain_id, data_prod_id + '-' + segment_id + '.csv'].join('/')
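With the list/delete split above, a caller can read the whole release table, decide which master projects are obsolete, and prune them; `delete_master_project_from_ads` renders `delete_from_lcm_release.sql.erb` with the IDs joined into a quoted list. A hedged sketch — the segment ID is a placeholder and reaching the helpers via `GoodData::LCM2::Helpers` is an assumption about the enclosing module:

    rows = GoodData::LCM2::Helpers.get_master_project_list_from_ads(nil, ads_client, 'segment-1')
    keep = rows.last(2).map { |r| r[:master_project_id] }      # keep the two newest versions
    obsolete = rows.map { |r| r[:master_project_id] } - keep
    # Renders roughly: DELETE FROM "LCM_RELEASE" WHERE segment_id = 'segment-1'
    #                  AND master_project_id IN ('pid1', 'pid2');
    GoodData::LCM2::Helpers.delete_master_project_from_ads(nil, ads_client, 'segment-1', obsolete)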
data/lib/gooddata/lcm/lcm2.rb
@@ -96,6 +96,7 @@ module GoodData
         CollectComputedAttributeMetrics,
         ImportObjectCollections,
         SynchronizeComputedAttributes,
+        SynchronizeDataSetMapping,
         SynchronizeProcesses,
         SynchronizeSchedules,
         SynchronizeColorPalette,
@@ -122,9 +123,11 @@ module GoodData
         AssociateClients,
         RenameExistingClientProjects,
         ProvisionClients,
+        UpdateMetricFormats,
         EnsureTechnicalUsersDomain,
         EnsureTechnicalUsersProject,
         CollectDymanicScheduleParams,
+        SynchronizeDataSetMapping,
         SynchronizeETLsInSegment
       ],

@@ -136,8 +139,10 @@ module GoodData
         EnsureTechnicalUsersDomain,
         EnsureTechnicalUsersProject,
         SynchronizeLdm,
+        SynchronizeDataSetMapping,
         MigrateGdcDateDimension,
         SynchronizeClients,
+        UpdateMetricFormats,
         SynchronizeComputedAttributes,
         CollectDymanicScheduleParams,
         SynchronizeETLsInSegment
data/lib/gooddata/mixins/md_object_query.rb
@@ -54,6 +54,7 @@ module GoodData
           y << (klass ? client.create(klass, item, project: project) : item)
         end
         break if result['objects']['paging']['count'] < page_limit
+
         offset += page_limit
       end
     end
data/lib/gooddata/models/data_source.rb
@@ -34,7 +34,7 @@ module GoodData
           c.create(DataSource, ds_data)
         end
       else
-        c.create(DataSource, c.get("#{DATA_SOURCES_URL}/#{id}"))
+        c.create(DataSource, c.get(DATA_SOURCES_URL + '/' + id))
       end
     end

@@ -177,6 +177,10 @@ module GoodData
       @json['dataSource']['connectionInfo'][type]
     end

+    def type
+      @json['dataSource']['connectionInfo'].first[0].upcase
+    end
+
     private

     def build_connection_info
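The new `type` reader simply upcases the first key of `connectionInfo`, moving the connection-type lookup onto the model and replacing `Project#get_data_source_type` (removed further down). A schematic illustration of the JSON shape it expects:

    # Schematic payload; a snowflake data source reports type "SNOWFLAKE".
    json = { 'dataSource' => { 'connectionInfo' => { 'snowflake' => {} } } }
    json['dataSource']['connectionInfo'].first[0].upcase # => "SNOWFLAKE"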
data/lib/gooddata/models/dataset_mapping.rb
@@ -0,0 +1,36 @@
+# encoding: UTF-8
+# frozen_string_literal: true
+#
+# Copyright (c) 2010-2021 GoodData Corporation. All rights reserved.
+# This source code is licensed under the BSD-style license found in the
+# LICENSE file in the root directory of this source tree.
+
+module GoodData
+  class DatasetMapping
+    DATASET_MAPPING_GET_URI = '/gdc/dataload/projects/%<project_id>s/modelMapping/datasets'
+    DATASET_MAPPING_UPDATE_URI = '/gdc/dataload/projects/%<project_id>s/modelMapping/datasets/bulk/upsert'
+
+    class << self
+      def [](opts = { :client => GoodData.connection, :project => GoodData.project })
+        client, project = GoodData.get_client_and_project(opts)
+        get_uri = DATASET_MAPPING_GET_URI % { project_id: project.pid }
+        res = client.get(get_uri)
+        res
+      end
+
+      alias_method :get, :[]
+    end
+
+    def initialize(data)
+      @data = data
+    end
+
+    def save(opts)
+      client, project = GoodData.get_client_and_project(opts)
+
+      post_uri = DATASET_MAPPING_UPDATE_URI % { project_id: project.pid }
+      res = client.post(post_uri, @data, opts)
+      res
+    end
+  end
+end
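Paired with the `Project` methods added below, this lets an LCM run copy a dataset mapping from one project into another. A hedged usage sketch (`source` and `target` are assumed to be connected `GoodData::Project` instances):

    mapping = source.dataset_mapping                 # GET .../modelMapping/datasets
    result  = target.update_dataset_mapping(mapping) # POST .../modelMapping/datasets/bulk/upsert
    # => { to: "<target pid>", count: <items upserted>, status: "OK" }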
data/lib/gooddata/models/metadata/label.rb
@@ -40,22 +40,20 @@ module GoodData
       end
     end

-    # Gets valid elements using /validElements? API
+    # Gets valid elements of a label for a specific paging (:offset and :limit) or get validElements of a specific value (:filter).
+    # In the case filter a specific value, because the API /validElements only filter by partial match, we need to filter again at client side for exact match.
     # @return [Array] Results
     def get_valid_elements(*args)
-      results, params = valid_elements(*args)
-      # TMA-775 - the validElements API can possibly return more matches than requested (usually 1)
-      # so we do a preliminary first request to check and then increase the limit if needed
-      if results['validElements']['paging']['total'].to_i != params[:limit]
+      if args && !args.empty? && args.first[:filter]
+        params = args.first
         params[:limit] = 100_000
         results, = valid_elements params
-        if params[:filter]
-          results['validElements']['items'] = results['validElements']['items'].reject do |i|
-            i['element']['title'] != params[:filter]
-          end
+        results['validElements']['items'] = results['validElements']['items'].select do |i|
+          i['element']['title'] == params[:filter]
         end
+      else
+        results, = valid_elements(*args)
       end
-
       results
     end

@@ -74,24 +72,25 @@ module GoodData
     # @option options [Number] :limit limits the number of values to certain number. Default is 100
     # @return [Array]
     def values(options = {})
-      Enumerator.new do |y|
-        offset = options[:offset] || 0
-        page_limit = options[:limit] || 100
-        loop do
-          results = get_valid_elements(limit: page_limit, offset: offset)
-
-          elements = results['validElements']
-          elements['items'].map do |el|
-            v = el['element']
-            y << {
-              :value => v['title'],
-              :uri => v['uri']
-            }
-          end
-          break if elements['items'].count < page_limit
-          offset += page_limit
+      all_values = []
+      offset = options[:offset] || 0
+      page_limit = options[:limit] || 100
+      loop do
+        results = get_valid_elements(limit: page_limit, offset: offset)
+
+        elements = results['validElements']
+        elements['items'].map do |el|
+          v = el['element']
+          all_values << {
+            :value => v['title'],
+            :uri => v['uri']
+          }
         end
+        break if elements['items'].count < page_limit
+
+        offset += page_limit
       end
+      all_values
     end

     def values_count
@@ -136,7 +135,7 @@ module GoodData
       if status_url
         results = client.poll_on_response(status_url) do |body|
           status = body['taskState'] && body['taskState']['status']
-          status == 'RUNNING' || status == 'PREPARED'
+          status == 'RUNNING' || status == 'PREPARED' || body['uri']
         end
       end
data/lib/gooddata/models/project.rb
@@ -30,6 +30,7 @@ require_relative 'process'
 require_relative 'project_log_formatter'
 require_relative 'project_role'
 require_relative 'blueprint/blueprint'
+require_relative 'dataset_mapping'

 require_relative 'metadata/scheduled_mail'
 require_relative 'metadata/scheduled_mail/dashboard_attachment'
@@ -255,6 +256,22 @@ module GoodData
       transfer_schedules(from_project, to_project)
     end

+    def get_dataset_mapping(from_project)
+      GoodData::DatasetMapping.get(:client => from_project.client, :project => from_project)
+    end
+
+    def update_dataset_mapping(model_mapping_json, to_project)
+      dataset_mapping = GoodData::DatasetMapping.new(model_mapping_json)
+      res = dataset_mapping.save(:client => to_project.client, :project => to_project)
+      status = res&.dig('datasetMappings', 'items').nil? ? "Failed" : "OK"
+      count = "OK".eql?(status) ? res['datasetMappings']['items'].length : 0
+      {
+        to: to_project.pid,
+        count: count,
+        status: status
+      }
+    end
+
     # @param from_project The source project
     # @param to_project The target project
     # @param options Optional parameters
@@ -337,20 +354,16 @@ module GoodData
     def get_data_source_alias(data_source_id, client, aliases)
       unless aliases[data_source_id]
         data_source = GoodData::DataSource.from_id(data_source_id, client: client)
-        if data_source&.dig('dataSource', 'alias')
+        if data_source&.alias
           aliases[data_source_id] = {
-            :type => get_data_source_type(data_source),
-            :alias => data_source['dataSource']['alias']
+            :type => data_source.type,
+            :alias => data_source.alias
           }
         end
       end
       aliases[data_source_id]
     end

-    def get_data_source_type(data_source_data)
-      data_source_data&.dig('dataSource', 'connectionInfo') ? data_source_data['dataSource']['connectionInfo'].first[0].upcase : ""
-    end
-
     def replace_process_data_source_ids(process_data, client, aliases)
       component = process_data.dig(:process, :component)
       if component&.dig(:configLocation, :dataSourceConfig)
@@ -460,7 +473,9 @@ module GoodData
       local_stuff = local_schedules.map do |s|
         v = s.to_hash
         after_schedule = local_schedules.find { |s2| s.trigger_id == s2.obj_id }
-        v[:after] = s.trigger_id && after_schedule && after_schedule.name
+        after_process_schedule = from_project_processes.find { |p| after_schedule && p.obj_id == after_schedule.process_id }
+        v[:after] = s.trigger_id && after_process_schedule && after_schedule && after_schedule.name
+        v[:trigger_execution_status] = s.trigger_execution_status
         v[:remote_schedule] = s
         v[:params] = v[:params].except("EXECUTABLE", "PROCESS_ID")
         v.compact
@@ -529,6 +544,7 @@ module GoodData
         schedule.params = (schedule_spec[:params] || {})
         schedule.cron = schedule_spec[:cron] if schedule_spec[:cron]
         schedule.after = schedule_cache[schedule_spec[:after]] if schedule_spec[:after]
+        schedule.trigger_execution_status = schedule_cache[schedule_spec[:trigger_execution_status]] if schedule_spec[:after]
         schedule.hidden_params = schedule_spec[:hidden_params] || {}
         if process_spec.type != :dataload
           schedule.executable = schedule_spec[:executable] || (process_spec.type == :ruby ? 'main.rb' : 'main.grf')
@@ -589,7 +605,8 @@ module GoodData
           hidden_params: schedule_spec[:hidden_params],
           name: schedule_spec[:name],
           reschedule: schedule_spec[:reschedule],
-          state: schedule_spec[:state]
+          state: schedule_spec[:state],
+          trigger_execution_status: schedule_spec[:trigger_execution_status]
         }
       end
     end
@@ -2022,6 +2039,14 @@ module GoodData
       GoodData::Project.transfer_etl(client, self, target)
     end

+    def dataset_mapping
+      GoodData::Project.get_dataset_mapping(self)
+    end
+
+    def update_dataset_mapping(model_mapping_json)
+      GoodData::Project.update_dataset_mapping(model_mapping_json, self)
+    end
+
     def transfer_processes(target)
       GoodData::Project.transfer_processes(self, target)
     end
data/lib/gooddata/models/schedule.rb
@@ -101,6 +101,7 @@ module GoodData

         schedule.name = options[:name]
         schedule.set_trigger(trigger)
+        schedule.trigger_execution_status = options[:trigger_execution_status]
         schedule.params = default_opts[:params].merge(options[:params] || {})
         schedule.hidden_params = options[:hidden_params] || {}
         schedule.timezone = options[:timezone] || default_opts[:timezone]
@@ -468,6 +469,7 @@ module GoodData
         hidden_params: hidden_params,
         cron: cron,
         trigger_id: trigger_id,
+        trigger_execution_status: trigger_execution_status,
         timezone: timezone,
         uri: uri,
         reschedule: reschedule,
@@ -486,6 +488,16 @@ module GoodData
       self
     end

+    def trigger_execution_status
+      json['schedule']['triggerExecutionStatus']
+    end
+
+    def trigger_execution_status=(trigger_execution_status)
+      json['schedule']['triggerExecutionStatus'] = trigger_execution_status
+      @dirty = true
+      self # rubocop:disable Lint/Void
+    end
+
     def name
       json['schedule']['name']
     end
@@ -530,7 +542,7 @@ module GoodData
           'hiddenParams' => GoodData::Helpers.encode_hidden_params(hidden_params)
         }
       }
-
+      res['schedule']['triggerExecutionStatus'] = trigger_execution_status if trigger_execution_status
       res['schedule']['reschedule'] = reschedule if reschedule

       res
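With this accessor pair, `triggerExecutionStatus` now survives `to_hash`/`save` round-trips, which is what lets `transfer_schedules` in project.rb carry it from source to target schedules. A hedged sketch (`schedule` is assumed to be an existing triggered `GoodData::Schedule`; the 'ERROR' value is illustrative, not a documented enum):

    schedule.trigger_execution_status = 'ERROR' # sets triggerExecutionStatus, marks the schedule dirty
    schedule.save
    schedule.to_hash[:trigger_execution_status] # => "ERROR"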