gooddata 2.1.12-java → 2.1.13-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 73f3c8fde12dff87986355749de47fbdbbc334416d032f58c18d993fd5f28967
4
- data.tar.gz: 73e045492e7cccc765c4ad60c08242c53efaf3a92f9c7adf56af9259a22c14e0
3
+ metadata.gz: efc9086e1ddd4dea37bcc2836f3511352e04244d53195ced638d9cf5fc5f2edc
4
+ data.tar.gz: b71fb4b7b838aa0722ac8d1e193680848996691040373b82c821548d022a3adf
5
5
  SHA512:
6
- metadata.gz: 1ab74bd538fecff1536d028993fc62cc287435eeb19163fc3db0b842effd3feca3b746113358c7bd3236c9a9ac15f30ac37361b944cadfad6cfa7161f8219f81
7
- data.tar.gz: 18c48a23901266649fc9f4cf1db10d5f289ea4726f0cc5f63ceb919007f44f2dbbd6b9a37589bc87994c4104136237f49f0540ab7a4648506b5d5521f80c150b
6
+ metadata.gz: 5a2bcab685647330aa37a7201f629c8b4d73105506e0330b116bd7a0b5cbe5bfbd9f0f5838c40eb64319b20d503073ec68248603c7dfbf504f28e8058d9e1d1a
7
+ data.tar.gz: 7d193cdafe6a9ba629b66960415d402cd2a371deeff3f7fcae7311bfcf5c9e07f4f9d4571a7b7612aa430fff8cf15542730892b0907ff974b5a054d14b73577c
@@ -1,4 +1,11 @@
1
1
  # GoodData Ruby SDK Changelog
2
+ ## 2.1.13
3
+ - FEATURE: TMA-1676 Support LCM release across domain
4
+ - FEATURE: TMA-1672 Support sync process with generic datasource
5
+ - FEATURE: MSF-17743 upgrade custom v2 for rollout brick
6
+ - BUGFIX: MSF-17975 Introduce gdcshare to lcm bricks
7
+ - BUGFIX: TMA-1673 Update params processing to accept dot and space
8
+
2
9
  ## 2.1.12
3
10
  - FEATURE: MSF-17621 Apply patched version for activesupport to fix vulnerable issue
4
11
  - CONFIG: SETI-4379 Add gdc-fossa configuration for gooddata-ruby
data/Dockerfile CHANGED
@@ -37,7 +37,9 @@ RUN rvm install jruby-${JRUBY_VERSION} && gem update --system \
37
37
  WORKDIR /src
38
38
 
39
39
  RUN groupadd -g 48 apache \
40
+ && groupadd -g 65065 gdcshare \
40
41
  && useradd -u 48 -m --no-log-init -r -g apache -G rvm apache \
42
+ && usermod -a -G gdcshare apache \
41
43
  && chown apache: /src
42
44
 
43
45
  USER apache
data/README.md CHANGED
@@ -9,6 +9,8 @@ The best documentation for the GoodData API can be found using these resources:
9
9
  * http://developer.gooddata.com/api
10
10
  * https://secure.gooddata.com/gdc
11
11
  * http://rubydoc.info/gems/gooddata/frames
12
+
13
+ Feel free to check out the [GoodData community website](http://community.gooddata.com/) if you have any questions about the GoodData Analytics platform, our API, or this library.
12
14
 
13
15
  ## Status
14
16
 
@@ -1 +1 @@
1
- 2.1.12
1
+ 2.1.13
data/VERSION CHANGED
@@ -1 +1 @@
1
- 3.7.20
1
+ 3.7.22
@@ -0,0 +1,47 @@
1
# encoding: UTF-8
# frozen_string_literal: false
#
# Copyright (c) 2010-2020 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
#
# NOTE(fix): the original header declared both `frozen_string_literal: true`
# and `frozen_string_literal: false`; only one (non-frozen, the effective one)
# is kept to avoid the conflicting magic-comment warning.

module GoodData
  module Helpers
    class << self
      # Get a data source information from server by id
      #
      # @param [String] data_source_id The data source ID
      # @param [Object] client The Rest Client object
      # @return [Hash, nil] the data source payload, or nil when the id is blank
      def get_data_source_by_id(data_source_id, client)
        # +blank?+ comes from ActiveSupport (an existing gem dependency);
        # it guards against nil as well as empty ids.
        client.get("/gdc/dataload/dataSources/#{data_source_id}") unless data_source_id.blank?
      end

      # Verify to see if the data source exists in the domain using its alias
      #
      # @param [Hash] ds_alias The data source's alias descriptor (:alias, :type)
      # @param [Object] client The Rest Client object
      # @return [String] Id of the data source or failed with the reason
      def verify_data_source_alias(ds_alias, client)
        domain = client.connection.server.url
        fail "The data source alias is empty, check your data source configuration." unless ds_alias

        uri = "/gdc/dataload/dataSources/internal/availableAlias?alias=#{ds_alias[:alias]}"
        res = client.get(uri)
        fail "Unable to get information about the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" unless res
        # "available" means no existing data source uses the alias, i.e. it is missing.
        fail "Unable to find the #{ds_alias[:type]} Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" if res['availableAlias']['available']

        ds_type = res['availableAlias']['existingDataSource']['type']
        if ds_type && ds_type != ds_alias[:type]
          # NOTE(fix): expected vs. actual types were swapped in the original
          # message - the expected type is ds_alias[:type], the existing data
          # source's actual type is ds_type.
          fail "Wrong Data Source type - the '#{ds_alias[:type]}' type is expected but the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}' has the '#{ds_type}' type"
        else
          res['availableAlias']['existingDataSource']['id']
        end
      end
    end
  end
end
@@ -242,7 +242,7 @@ module GoodData
242
242
 
243
243
  def resolve_reference_params(data_params, params)
244
244
  reference_values = []
245
- regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{(\w+)\}/)
245
+ regexps = Regexp.union(/\\\\/, /\\\$/, /\$\{([\w\s\.]+)\}/)
246
246
  resolve_reference = lambda do |v|
247
247
  if v.is_a? Hash
248
248
  Hash[
@@ -262,7 +262,7 @@ module GoodData
262
262
  data_params.is_a?(Hash) ? '\\' : '\\\\' # rubocop: disable Metrics/BlockNesting
263
263
  elsif match =~ /\\\$/
264
264
  '$'
265
- elsif match =~ /\$\{(\w+)\}/
265
+ elsif match =~ /\$\{([\w\s\.]+)\}/
266
266
  val = params["#{$1}"]
267
267
  if val
268
268
  reference_values << val
@@ -0,0 +1,116 @@
1
# frozen_string_literal: true
# (C) 2019-2020 GoodData Corporation
require_relative 'base_action'

# Migrate date dimension urn:gooddata:date or urn:custom:date to urn:custom_v2:date
module GoodData
  module LCM2
    # LCM action that upgrades legacy date dimensions in client projects to
    # urn:custom_v2:date, driven by the date dimensions of the latest master
    # blueprint collected earlier in the pipeline (segment_info[:from_blueprint]).
    class MigrateGdcDateDimension < BaseAction
      DESCRIPTION = 'Migrate Gdc Date Dimension'
      DATE_DIMENSION_CUSTOM_V2 = 'urn:custom_v2:date'
      # Legacy URNs eligible for the upgrade to DATE_DIMENSION_CUSTOM_V2.
      DATE_DIMENSION_OLD = %w[urn:gooddata:date urn:custom:date].freeze

      PARAMS = define_params(self) do
        description 'Client Used for Connecting to GD'
        param :gdc_gd_client, instance_of(Type::GdClientType), required: true

        description 'Specifies how to synchronize LDM and resolve possible conflicts'
        param :synchronize_ldm, instance_of(Type::SynchronizeLDM), required: false, default: 'diff_against_master_with_fallback'

        description 'Synchronization Info'
        param :synchronize, array_of(instance_of(Type::SynchronizationInfoType)), required: true, generated: true
      end

      RESULT_HEADER = %i[from to status].freeze

      class << self
        # Action entry point: migrates date dimensions for every segment.
        def call(params)
          results = []
          params.synchronize.each do |segment_info|
            results.concat(migrate_date_dimension(params, segment_info))
          end

          {
            results: results
          }
        end

        # Migrates the date dimensions of all 'to' projects of one segment.
        # @return [Array<Hash>] one {from:, to:, status:} row per upgraded project
        def migrate_date_dimension(params, segment_info)
          results = []
          client = params.gdc_gd_client
          latest_blueprint = segment_info[:from_blueprint]
          # don't migrate when latest master doesn't contain custom v2 date.
          return results unless contain_v2?(latest_blueprint)

          previous_blueprint = segment_info[:previous_master]&.blueprint
          # check latest master and previous master
          master_upgrade_datasets = get_upgrade_dates(latest_blueprint, previous_blueprint) if params[:synchronize_ldm].downcase == 'diff_against_master' && previous_blueprint
          unless master_upgrade_datasets&.empty?
            segment_info[:to].pmap do |entry|
              pid = entry[:pid]
              to_project = client.projects(pid) || fail("Invalid 'to' project specified - '#{pid}'")
              to_blueprint = to_project.blueprint
              upgrade_datasets = get_upgrade_dates(latest_blueprint, to_blueprint)
              next if upgrade_datasets.empty?

              message = get_upgrade_message(upgrade_datasets)

              results << {
                from: segment_info[:from],
                to: pid,
                status: to_project.upgrade_custom_v2(message)
              }
            end
          end

          results
        end

        # Collects identifiers of source date datasets that should be upgraded
        # in the destination blueprint.
        #
        # @return [Array<String>] dataset identifiers; empty when nothing to do.
        def get_upgrade_dates(src_blueprint, dest_blueprint)
          dest_dates = dest_blueprint ? get_date_dimensions(dest_blueprint) : []
          src_dates = src_blueprint ? get_date_dimensions(src_blueprint) : []

          # NOTE(fix): the original returned +false+ here, which crashed the
          # callers on +false.empty?+; an empty array keeps the contract uniform.
          return [] if dest_dates.empty? || src_dates.empty?

          upgrade_datasets = []
          dest_dates.each do |dest|
            src_dim = get_date_dimension(src_blueprint, dest[:id])
            next unless src_dim

            upgrade_datasets << src_dim[:identifier] if upgrade?(src_dim, dest) && src_dim[:identifier]
          end

          upgrade_datasets
        end

        # Builds the API payload for the exact-dataset upgrade request.
        def get_upgrade_message(upgrade_datasets)
          {
            upgrade: {
              dateDatasets: {
                upgrade: "exact",
                datasets: upgrade_datasets
              }
            }
          }
        end

        # True when the source is already custom v2 and the destination still
        # uses one of the legacy URNs.
        def upgrade?(src_dim, dest_dim)
          src_dim[:urn] == DATE_DIMENSION_CUSTOM_V2 && DATE_DIMENSION_OLD.any? { |e| dest_dim[:urn] == e }
        end

        # True when the blueprint contains at least one custom v2 date dimension.
        def contain_v2?(blueprint)
          get_date_dimensions(blueprint).any? { |e| e[:urn] == DATE_DIMENSION_CUSTOM_V2 }
        end

        def get_date_dimension(blueprint, id)
          GoodData::Model::ProjectBlueprint.find_date_dimension(blueprint, id)
        end

        def get_date_dimensions(blueprint)
          GoodData::Model::ProjectBlueprint.date_dimensions(blueprint)
        end
      end
    end
  end
end
@@ -49,6 +49,8 @@ module GoodData
49
49
  param :include_deprecated, instance_of(Type::BooleanType), required: false, default: false
50
50
  end
51
51
 
52
+ RESULT_HEADER = %i[from to status]
53
+
52
54
  class << self
53
55
  def call(params)
54
56
  results = []
@@ -76,9 +78,9 @@ module GoodData
76
78
  include_deprecated = params.include_deprecated.to_b
77
79
  from_pid = segment_info[:from]
78
80
  from = params.development_client.projects(from_pid) || fail("Invalid 'from' project specified - '#{from_pid}'")
79
-
80
81
  GoodData.logger.info "Creating Blueprint, project: '#{from.title}', PID: #{from_pid}"
81
82
  blueprint = from.blueprint(include_ca: params.include_computed_attributes.to_b)
83
+ segment_info[:from_blueprint] = blueprint
82
84
  maql_diff = nil
83
85
  previous_master = segment_info[:previous_master]
84
86
  diff_against_master = %w(diff_against_master_with_fallback diff_against_master)
@@ -138,6 +138,7 @@ module GoodData
138
138
  EnsureTechnicalUsersDomain,
139
139
  EnsureTechnicalUsersProject,
140
140
  SynchronizeLdm,
141
+ MigrateGdcDateDimension,
141
142
  SynchronizeClients,
142
143
  SynchronizeComputedAttributes,
143
144
  CollectDymanicScheduleParams,
@@ -105,6 +105,7 @@ module GoodData
105
105
  d[:title] = date_dim['dateDimension']['title']
106
106
  d[:urn] = date_dim['dateDimension']['urn']
107
107
  d[:identifier_prefix] = date_dim['dateDimension']['identifierPrefix']
108
+ d[:identifier] = date_dim['dateDimension']['identifier'] if date_dim['dateDimension']['identifier']
108
109
  d[:columns] = parse_bridges(date_dim)
109
110
  end
110
111
  end
@@ -118,11 +118,13 @@ module GoodData
118
118
  GoodData.logger.info("Deploying #{path}") if verbose
119
119
 
120
120
  deployed_path = Process.upload_package(path, files_to_exclude, client: client, project: project)
121
+ data_sources = options[:data_sources] || []
121
122
  data = {
122
123
  :process => {
123
124
  :name => deploy_name,
124
125
  :path => "/uploads/#{File.basename(deployed_path)}",
125
- :type => type
126
+ :type => type,
127
+ :dataSources => data_sources
126
128
  }
127
129
  }
128
130
 
@@ -171,10 +173,12 @@ module GoodData
171
173
  verbose = options[:verbose] || false
172
174
  GoodData.logger.info("Deploying #{path}") if verbose
173
175
 
176
+ data_sources = options[:data_sources] || []
174
177
  data = {
175
178
  process: {
176
179
  name: deploy_name,
177
180
  path: path,
181
+ dataSources: data_sources,
178
182
  type: 'RUBY'
179
183
  }
180
184
  }
@@ -185,7 +189,7 @@ module GoodData
185
189
  def deploy_component(data, options = { client: GoodData.client, project: GoodData.project })
186
190
  client, project = GoodData.get_client_and_project(options)
187
191
  data = { process: data } unless data[:process]
188
- data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component].include? k }
192
+ data[:process] = GoodData::Helpers.symbolize_keys(data[:process]).select { |k| %i[type name component dataSources].include? k }
189
193
  data[:process][:component] = GoodData::Helpers.symbolize_keys(data[:process][:component]).select { |k| %i[name version configLocation config].include? k }
190
194
 
191
195
  save(data, options)
@@ -266,7 +270,7 @@ module GoodData
266
270
  # @option options [String] :name Readable name of the process
267
271
  # @option options [Boolean] :verbose (false) Switch on verbose mode for detailed logging
268
272
  def deploy(path, options = {})
269
- Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type }.merge(options))
273
+ Process.deploy(path, { client: client, process_id: process_id, :project => project, :name => name, :type => type, :data_sources => data_sources }.merge(options))
270
274
  end
271
275
 
272
276
  # Downloads the process from S3 in a zipped form.
@@ -326,6 +330,10 @@ module GoodData
326
330
  process['component']
327
331
  end
328
332
 
333
# Returns the data source references attached to this process, as stored
# under the 'dataSources' key of the raw process payload (nil when absent).
def data_sources
  process.fetch('dataSources', nil)
end
336
+
329
337
  # Determines whether the process is an ADDv2 component.
330
338
  # @return [Bool] True if the process is an ADDv2 component.
331
339
  def add_v2_component?
@@ -261,21 +261,26 @@ module GoodData
261
261
  # @option ads_output_stage_uri Uri of the source output stage. It must be in the same domain as the target project.
262
262
  def transfer_processes(from_project, to_project, options = {})
263
263
  options = GoodData::Helpers.symbolize_keys(options)
264
+ aliases = {}
264
265
  to_project_processes = to_project.processes
265
266
  additional_hidden_params = options[:additional_hidden_params] || {}
266
267
  result = from_project.processes.uniq(&:name).map do |process|
267
- fail "The process name #{process.name} must be unique in transfered project #{to_project}" if to_project_processes.count { |p| p.name == process.name } > 1
268
+ fail "The process name #{process.name} must be unique in transferred project #{to_project}" if to_project_processes.count { |p| p.name == process.name } > 1
268
269
  next if process.type == :dataload || process.add_v2_component?
270
+ collect_process_aliases(process.data, from_project.client, aliases)
269
271
 
270
272
  to_process = to_project_processes.find { |p| p.name == process.name }
271
273
 
274
+ data_sources = GoodData::Helpers.symbolize_keys_recursively!(process.data_sources)
275
+ data_sources = replace_data_source_ids(data_sources, to_project.client, aliases)
272
276
  to_process = if process.path
273
277
  to_process.delete if to_process
274
- GoodData::Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project)
278
+ Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: data_sources)
275
279
  elsif process.component
276
280
  to_process.delete if to_process
277
281
  process_hash = GoodData::Helpers::DeepMergeableHash[GoodData::Helpers.symbolize_keys(process.to_hash)].deep_merge(additional_hidden_params)
278
- GoodData::Process.deploy_component(process_hash, project: to_project, client: to_project.client)
282
+ process_hash = replace_process_data_source_ids(process_hash, to_project.client, aliases)
283
+ Process.deploy_component(process_hash, project: to_project, client: to_project.client)
279
284
  else
280
285
  Dir.mktmpdir('etl_transfer') do |dir|
281
286
  dir = Pathname(dir)
@@ -283,11 +288,10 @@ module GoodData
283
288
  File.open(filename, 'w') do |f|
284
289
  f << process.download
285
290
  end
286
-
287
291
  if to_process
288
- to_process.deploy(filename, type: process.type, name: process.name)
292
+ to_process.deploy(filename, type: process.type, name: process.name, data_sources: data_sources)
289
293
  else
290
- to_project.deploy_process(filename, type: process.type, name: process.name)
294
+ to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: data_sources)
291
295
  end
292
296
  end
293
297
  end
@@ -318,6 +322,56 @@ module GoodData
318
322
  result.compact
319
323
  end
320
324
 
325
# Collects the alias descriptors of every data source referenced by one
# process (both plain dataSources entries and a component's
# configLocation/dataSourceConfig) into the shared +aliases+ cache.
#
# @param [Hash] process_data raw process payload (string keys)
# @param [Object] client REST client of the source project
# @param [Hash] aliases accumulator: data source id => { :alias, :type }
# @return [Hash] the (mutated) aliases accumulator
def collect_process_aliases(process_data, client, aliases)
  data_sources = process_data.dig('process', 'dataSources')
  unless data_sources.blank?
    # +each+ instead of +map+: called purely for the caching side effect.
    data_sources.each do |data_source|
      get_data_source_alias(data_source['id'], client, aliases)
    end
  end
  component = process_data.dig('process', 'component')
  get_data_source_alias(component['configLocation']['dataSourceConfig']['id'], client, aliases) if component&.dig('configLocation', 'dataSourceConfig')
  aliases
end
336
+
337
# Resolves (and caches in +aliases+) the alias/type descriptor for a data
# source id. Returns nil when the data source has no alias on the server.
#
# @param [String] data_source_id id to look up
# @param [Object] client REST client used for the server lookup
# @param [Hash] aliases cache: data source id => { :type, :alias }
# @return [Hash, nil] the cached descriptor for +data_source_id+
def get_data_source_alias(data_source_id, client, aliases)
  cached = aliases[data_source_id]
  return cached if cached

  data_source = GoodData::Helpers.get_data_source_by_id(data_source_id, client)
  if data_source&.dig('dataSource', 'alias')
    aliases[data_source_id] = {
      :type => get_data_source_type(data_source),
      :alias => data_source['dataSource']['alias']
    }
  end
  aliases[data_source_id]
end
349
+
350
# Derives the data source type (e.g. "SNOWFLAKE") from the first key of the
# connectionInfo section of a data source payload; "" when absent or nil.
def get_data_source_type(data_source_data)
  connection_info = data_source_data&.dig('dataSource', 'connectionInfo')
  return "" unless connection_info

  connection_info.keys.first.upcase
end
353
+
354
# Rewrites the data source ids inside a symbolized process payload so that
# they reference data sources in the target domain, resolved through the
# previously collected +aliases+. Mutates and returns +process_data+.
def replace_process_data_source_ids(process_data, client, aliases)
  component = process_data.dig(:process, :component)
  if component&.dig(:configLocation, :dataSourceConfig)
    ds_config = component[:configLocation][:dataSourceConfig]
    ds_config[:id] = GoodData::Helpers.verify_data_source_alias(aliases[ds_config[:id]], client)
  end
  process_data[:process][:dataSources] = replace_data_source_ids(process_data[:process][:dataSources], client, aliases)
  process_data
end
363
+
364
# Maps each data source reference to the id of the matching data source in
# the target domain (looked up by alias). Returns [] for nil/empty input.
#
# @param [Array<Hash>, nil] data_sources entries of the form { :id => ... }
# @param [Object] client REST client bound to the target domain
# @param [Hash] aliases data source id => { :alias, :type } descriptor
# @return [Array<Hash>] entries of the form { :id => new_id }
def replace_data_source_ids(data_sources, client, aliases)
  return [] if data_sources.nil? || data_sources.empty?

  # +map+ builds the result directly instead of pushing into a side array
  # from within a discarded +map+ (the original anti-pattern).
  data_sources.map do |data_source|
    { id: GoodData::Helpers.verify_data_source_alias(aliases[data_source[:id]], client) }
  end
end
374
+
321
375
  def transfer_user_groups(from_project, to_project)
322
376
  from_project.user_groups.map do |ug|
323
377
  # migrate groups
@@ -625,6 +679,7 @@ module GoodData
625
679
  def blueprint(options = {})
626
680
  options = { include_ca: true }.merge(options)
627
681
  result = client.get("/gdc/projects/#{pid}/model/view", params: { includeDeprecated: true, includeGrain: true, includeCA: options[:include_ca] })
682
+
628
683
  polling_url = result['asyncTask']['link']['poll']
629
684
  model = client.poll_on_code(polling_url, options)
630
685
  bp = GoodData::Model::FromWire.from_wire(model, options)
@@ -1922,6 +1977,20 @@ module GoodData
1922
1977
  [user, roles]
1923
1978
  end
1924
1979
 
1980
# Runs the date-dimension upgrade task for this project.
#
# Posts +message+ to /gdc/md/<pid>/datedimension/upgrade. When the server
# answers synchronously with a wTaskStatus, that status is returned verbatim;
# otherwise the async task is polled until it leaves the RUNNING state and
# the final result is collapsed to 'OK' or 'FAIL'.
def upgrade_custom_v2(message, options = {})
  response = client&.post("/gdc/md/#{pid}/datedimension/upgrade", message)

  immediate = response['wTaskStatus'] && response['wTaskStatus']['status']
  return immediate if immediate

  poll_link = response['asyncTask']['link']['poll']
  final = client&.poll_on_response(poll_link, options) do |body|
    body && body['wTaskStatus'] && body['wTaskStatus']['status'] == 'RUNNING'
  end

  final['wTaskStatus']['status'] == 'OK' ? 'OK' : 'FAIL'
end
1993
+
1925
1994
  def add
1926
1995
  @add ||= GoodData::AutomatedDataDistribution.new(self)
1927
1996
  @add
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: gooddata
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.1.12
4
+ version: 2.1.13
5
5
  platform: java
6
6
  authors:
7
7
  - Pavel Kolesnikov
@@ -14,7 +14,7 @@ authors:
14
14
  autorequire:
15
15
  bindir: bin
16
16
  cert_chain: []
17
- date: 2020-06-22 00:00:00.000000000 Z
17
+ date: 2020-07-22 00:00:00.000000000 Z
18
18
  dependencies:
19
19
  - !ruby/object:Gem::Dependency
20
20
  requirement: !ruby/object:Gem::Requirement
@@ -687,6 +687,7 @@ files:
687
687
  - lib/gooddata/helpers/crypto_helper.rb
688
688
  - lib/gooddata/helpers/csv_helper.rb
689
689
  - lib/gooddata/helpers/data_helper.rb
690
+ - lib/gooddata/helpers/data_source_helpers.rb
690
691
  - lib/gooddata/helpers/erb_helper.rb
691
692
  - lib/gooddata/helpers/global_helpers.rb
692
693
  - lib/gooddata/helpers/global_helpers_params.rb
@@ -715,6 +716,7 @@ files:
715
716
  - lib/gooddata/lcm/actions/hello_world.rb
716
717
  - lib/gooddata/lcm/actions/help.rb
717
718
  - lib/gooddata/lcm/actions/import_object_collections.rb
719
+ - lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb
718
720
  - lib/gooddata/lcm/actions/provision_clients.rb
719
721
  - lib/gooddata/lcm/actions/purge_clients.rb
720
722
  - lib/gooddata/lcm/actions/rename_existing_client_projects.rb