gooddata 2.1.19-java → 2.3.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gdc-ii-config.yaml +42 -1
- data/.github/workflows/build.yml +67 -0
- data/.github/workflows/pre-merge.yml +72 -0
- data/.pronto.yml +1 -0
- data/.rubocop.yml +2 -14
- data/CHANGELOG.md +47 -0
- data/Dockerfile +27 -14
- data/Dockerfile.jruby +5 -15
- data/Dockerfile.ruby +5 -7
- data/Gemfile +4 -2
- data/README.md +6 -6
- data/Rakefile +1 -1
- data/SDK_VERSION +1 -1
- data/VERSION +1 -1
- data/bin/run_brick.rb +7 -0
- data/ci/mssql/pom.xml +62 -0
- data/ci/mysql/pom.xml +62 -0
- data/ci/redshift/pom.xml +4 -5
- data/docker-compose.lcm.yml +42 -4
- data/docker-compose.yml +42 -0
- data/gooddata.gemspec +21 -21
- data/k8s/charts/lcm-bricks/Chart.yaml +1 -1
- data/lcm.rake +11 -8
- data/lib/gooddata/bricks/base_pipeline.rb +26 -0
- data/lib/gooddata/bricks/brick.rb +0 -1
- data/lib/gooddata/bricks/middleware/aws_middleware.rb +35 -9
- data/lib/gooddata/bricks/middleware/execution_result_middleware.rb +3 -3
- data/lib/gooddata/bricks/pipeline.rb +2 -14
- data/lib/gooddata/cloud_resources/blobstorage/blobstorage_client.rb +98 -0
- data/lib/gooddata/cloud_resources/mssql/drivers/.gitkeepme +0 -0
- data/lib/gooddata/cloud_resources/mssql/mssql_client.rb +122 -0
- data/lib/gooddata/cloud_resources/mysql/drivers/.gitkeepme +0 -0
- data/lib/gooddata/cloud_resources/mysql/mysql_client.rb +121 -0
- data/lib/gooddata/cloud_resources/postgresql/postgresql_client.rb +0 -1
- data/lib/gooddata/cloud_resources/redshift/drivers/.gitkeepme +0 -0
- data/lib/gooddata/cloud_resources/redshift/redshift_client.rb +0 -2
- data/lib/gooddata/cloud_resources/snowflake/snowflake_client.rb +18 -1
- data/lib/gooddata/helpers/data_helper.rb +9 -4
- data/lib/gooddata/lcm/actions/base_action.rb +157 -0
- data/lib/gooddata/lcm/actions/collect_data_product.rb +2 -1
- data/lib/gooddata/lcm/actions/collect_meta.rb +3 -1
- data/lib/gooddata/lcm/actions/collect_projects_warning_status.rb +53 -0
- data/lib/gooddata/lcm/actions/collect_segment_clients.rb +14 -0
- data/lib/gooddata/lcm/actions/initialize_continue_on_error_option.rb +87 -0
- data/lib/gooddata/lcm/actions/migrate_gdc_date_dimension.rb +31 -4
- data/lib/gooddata/lcm/actions/provision_clients.rb +34 -5
- data/lib/gooddata/lcm/actions/synchronize_cas.rb +24 -4
- data/lib/gooddata/lcm/actions/synchronize_clients.rb +112 -11
- data/lib/gooddata/lcm/actions/synchronize_dataset_mappings.rb +89 -0
- data/lib/gooddata/lcm/actions/synchronize_etls_in_segment.rb +48 -11
- data/lib/gooddata/lcm/actions/synchronize_kd_dashboard_permission.rb +103 -0
- data/lib/gooddata/lcm/actions/synchronize_ldm.rb +79 -23
- data/lib/gooddata/lcm/actions/synchronize_ldm_layout.rb +98 -0
- data/lib/gooddata/lcm/actions/synchronize_pp_dashboard_permission.rb +108 -0
- data/lib/gooddata/lcm/actions/synchronize_schedules.rb +31 -1
- data/lib/gooddata/lcm/actions/synchronize_user_filters.rb +26 -18
- data/lib/gooddata/lcm/actions/synchronize_user_groups.rb +30 -4
- data/lib/gooddata/lcm/actions/synchronize_users.rb +11 -10
- data/lib/gooddata/lcm/actions/update_metric_formats.rb +202 -0
- data/lib/gooddata/lcm/data/delete_from_lcm_release.sql.erb +5 -0
- data/lib/gooddata/lcm/exceptions/lcm_execution_warning.rb +15 -0
- data/lib/gooddata/lcm/helpers/check_helper.rb +19 -0
- data/lib/gooddata/lcm/helpers/release_table_helper.rb +42 -8
- data/lib/gooddata/lcm/lcm2.rb +50 -4
- data/lib/gooddata/lcm/user_bricks_helper.rb +9 -0
- data/lib/gooddata/mixins/inspector.rb +1 -1
- data/lib/gooddata/mixins/md_object_query.rb +1 -0
- data/lib/gooddata/models/data_source.rb +5 -1
- data/lib/gooddata/models/dataset_mapping.rb +36 -0
- data/lib/gooddata/models/ldm_layout.rb +38 -0
- data/lib/gooddata/models/metadata/label.rb +26 -27
- data/lib/gooddata/models/project.rb +230 -30
- data/lib/gooddata/models/project_creator.rb +83 -6
- data/lib/gooddata/models/schedule.rb +13 -1
- data/lib/gooddata/models/segment.rb +2 -1
- data/lib/gooddata/models/user_filters/user_filter_builder.rb +162 -68
- data/lib/gooddata/rest/connection.rb +5 -3
- data/lib/gooddata/rest/phmap.rb +2 -0
- data/lib/gooddata.rb +1 -0
- data/lib/gooddata_brick_base.rb +35 -0
- data/sonar-project.properties +6 -0
- metadata +96 -65
- data/lib/gooddata/bricks/middleware/bulk_salesforce_middleware.rb +0 -37
- data/lib/gooddata/cloud_resources/redshift/drivers/log4j.properties +0 -15
@@ -30,6 +30,8 @@ require_relative 'process'
 require_relative 'project_log_formatter'
 require_relative 'project_role'
 require_relative 'blueprint/blueprint'
+require_relative 'dataset_mapping'
+require_relative 'ldm_layout'
 
 require_relative 'metadata/scheduled_mail'
 require_relative 'metadata/scheduled_mail/dashboard_attachment'
@@ -255,6 +257,45 @@ module GoodData
   transfer_schedules(from_project, to_project)
 end
 
+def get_dataset_mapping(from_project)
+  GoodData::DatasetMapping.get(:client => from_project.client, :project => from_project)
+end
+
+def update_dataset_mapping(model_mapping_json, to_project)
+  dataset_mapping = GoodData::DatasetMapping.new(model_mapping_json)
+  res = dataset_mapping.save(:client => to_project.client, :project => to_project)
+  status = res&.dig('datasetMappings', 'items').nil? ? "Failed" : "OK"
+  count = "OK".eql?(status) ? res['datasetMappings']['items'].length : 0
+  {
+    to: to_project.pid,
+    count: count,
+    status: status
+  }
+end
+
+def get_ldm_layout(from_project)
+  GoodData::LdmLayout.get(:client => from_project.client, :project => from_project)
+rescue StandardError => e
+  GoodData.logger.warn "An unexpected error when get ldm layout. Error: #{e.message}"
+  GoodData::LdmLayout::DEFAULT_EMPTY_LDM_LAYOUT
+end
+
+def save_ldm_layout(ldm_layout_json, to_project)
+  ldm_layout = GoodData::LdmLayout.new(ldm_layout_json)
+  begin
+    ldm_layout.save(:client => to_project.client, :project => to_project)
+    status = "OK"
+  rescue StandardError => e
+    GoodData.logger.warn "An unexpected error when save ldm layout. Error: #{e.message}"
+    status = "Failed"
+  end
+
+  {
+    to: to_project.pid,
+    status: status
+  }
+end
+
 # @param from_project The source project
 # @param to_project The target project
 # @param options Optional parameters
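The four class-level helpers above are the plumbing behind the new dataset-mapping and LDM-layout synchronization actions listed in the file list (synchronize_dataset_mappings.rb, synchronize_ldm_layout.rb). A minimal usage sketch, assuming they are invoked as GoodData::Project class methods (as the instance wrappers added later suggest) and that the value returned by each get call can be fed back into the corresponding update/save call; connection details and PIDs are placeholders, not taken from the diff:

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    master = client.projects('master_pid')   # placeholder PIDs
    target = client.projects('client_pid')

    # Copy the dataset mapping and print the outcome hash built above.
    mapping = GoodData::Project.get_dataset_mapping(master)
    puts GoodData::Project.update_dataset_mapping(mapping, target)  # => { to: ..., count: ..., status: "OK"/"Failed" }

    # Copy the LDM layout; failures are logged and reported as status "Failed".
    layout = GoodData::Project.get_ldm_layout(master)
    puts GoodData::Project.save_ldm_layout(layout, target)          # => { to: ..., status: "OK"/"Failed" }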
@@ -337,20 +378,16 @@ module GoodData
 def get_data_source_alias(data_source_id, client, aliases)
   unless aliases[data_source_id]
     data_source = GoodData::DataSource.from_id(data_source_id, client: client)
-    if data_source&.
+    if data_source&.alias
       aliases[data_source_id] = {
-        :type =>
-        :alias => data_source
+        :type => data_source.type,
+        :alias => data_source.alias
       }
     end
   end
   aliases[data_source_id]
 end
 
-def get_data_source_type(data_source_data)
-  data_source_data&.dig('dataSource', 'connectionInfo') ? data_source_data['dataSource']['connectionInfo'].first[0].upcase : ""
-end
-
 def replace_process_data_source_ids(process_data, client, aliases)
   component = process_data.dig(:process, :component)
   if component&.dig(:configLocation, :dataSourceConfig)
@@ -403,16 +440,6 @@ module GoodData
 new_group.project = to_project
 new_group.description = ug.description
 new_group.save
-# migrate dashboard "grantees"
-dashboards = from_project.dashboards
-dashboards.each do |dashboard|
-  new_dashboard = to_project.dashboards.select { |dash| dash.title == dashboard.title }.first
-  next unless new_dashboard
-  grantee = dashboard.grantees['granteeURIs']['items'].select { |item| item['aclEntryURI']['grantee'].split('/').last == ug.links['self'].split('/').last }.first
-  next unless grantee
-  permission = grantee['aclEntryURI']['permission']
-  new_dashboard.grant(:member => new_group, :permission => permission)
-end
 
 {
   from: from_project.pid,
@@ -423,6 +450,68 @@ module GoodData
   end
 end
 
+def transfer_dashboard_permission(from_project, to_project, source_dashboards, target_dashboards)
+  source_user_groups = from_project.user_groups
+  target_user_groups = to_project.user_groups
+
+  source_dashboards.each do |source_dashboard|
+    target_dashboard = target_dashboards.select { |dash| dash.title == source_dashboard.title }.first
+    next unless target_dashboard
+
+    begin
+      source_group_dashboards = dashboard_user_groups(source_user_groups, source_dashboard)
+      target_group_dashboards = dashboard_user_groups(target_user_groups, target_dashboard)
+
+      common_group_names = source_group_dashboards.flat_map { |s| target_group_dashboards.select { |t| t[:name] == s[:name] } }.map { |x| [x[:name], true] }.to_h
+
+      remove_user_groups_from_dashboard(target_group_dashboards, target_dashboard, common_group_names)
+      add_user_groups_to_dashboard(source_group_dashboards, target_dashboard, common_group_names, target_user_groups)
+    rescue StandardError => e
+      GoodData.logger.warn "Failed to synchronize dashboard permission from project: '#{from_project.title}', PID: '#{from_project.pid}' to project: '#{to_project.title}', PID: '#{to_project.pid}', dashboard: '#{target_dashboard.title}', dashboard uri: '#{target_dashboard.uri}' . Error: #{e.message} - #{e}" # rubocop:disable Metrics/LineLength
+    end
+  end
+end
+
+def dashboard_user_groups(user_groups, dashboard)
+  group_dashboards = []
+  dashboard_grantees = dashboard.grantees['granteeURIs']['items'].select { |item| item['aclEntryURI']['grantee'].include?('/usergroups/') }
+
+  dashboard_grantees.each do |dashboard_grantee|
+    permission = dashboard_grantee['aclEntryURI']['permission']
+    group_id = dashboard_grantee['aclEntryURI']['grantee'].split('/').last
+    user_group = user_groups.select { |group| group.links['self'].split('/').last == group_id }.first
+    next unless user_group
+
+    group_dashboards << {
+      name: user_group.name,
+      user_group: user_group,
+      permission: permission
+    }
+  end
+  group_dashboards
+end
+
+def remove_user_groups_from_dashboard(group_dashboards, dashboard, common_group_names)
+  group_dashboards.each do |group_dashboard|
+    group_name = group_dashboard[:name]
+    next if common_group_names && common_group_names[group_name]
+
+    dashboard.revoke(:member => group_dashboard[:user_group], :permission => group_dashboard[:permission])
+  end
+end
+
+def add_user_groups_to_dashboard(group_dashboards, dashboard, common_group_names, target_user_groups)
+  group_dashboards.each do |group_dashboard|
+    group_name = group_dashboard[:name]
+    next if common_group_names && common_group_names[group_name]
+
+    target_user_group = target_user_groups.select { |group| group.name == group_name }.first
+    next unless target_user_group
+
+    dashboard.grant(:member => target_user_group, :permission => group_dashboard[:permission])
+  end
+end
+
 # Clones project along with etl and schedules.
 #
 # @param client [GoodData::Rest::Client] GoodData client to be used for connection
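These helpers replace the per-user-group dashboard grant migration removed in the previous hunk: transfer_dashboard_permission matches dashboards by title, revokes group grants that exist only on the target, and copies grants that exist only on the source, leaving groups present on both sides untouched. A hedged sketch of a call, assuming it is exposed as a class-level helper like the neighbouring transfer_* methods; projects and credentials are placeholders:

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    from_project = client.projects('master_pid')
    to_project   = client.projects('client_pid')

    # Pixel-perfect dashboards on both sides; matching is done by title.
    GoodData::Project.transfer_dashboard_permission(
      from_project, to_project,
      from_project.dashboards, to_project.dashboards
    )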
@@ -430,7 +519,7 @@ module GoodData
 # Object to be cloned from. Can be either segment in which case we take
 # the master, client in which case we take its project, string in which
 # case we treat is as an project object or directly project.
-def transfer_schedules(from_project, to_project)
+def transfer_schedules(from_project, to_project, has_cycle_trigger = false)
   to_project_processes = to_project.processes.sort_by(&:name)
   from_project_processes = from_project.processes.sort_by(&:name)
   from_project_processes.reject!(&:add_v2_component?)
@@ -460,7 +549,9 @@ module GoodData
 local_stuff = local_schedules.map do |s|
   v = s.to_hash
   after_schedule = local_schedules.find { |s2| s.trigger_id == s2.obj_id }
-
+  after_process_schedule = from_project_processes.find { |p| after_schedule && p.obj_id == after_schedule.process_id }
+  v[:after] = s.trigger_id && after_process_schedule && after_schedule && after_schedule.name
+  v[:trigger_execution_status] = s.trigger_execution_status
   v[:remote_schedule] = s
   v[:params] = v[:params].except("EXECUTABLE", "PROCESS_ID")
   v.compact
@@ -481,15 +572,23 @@ module GoodData
 end
 
 results = []
+update_trigger_schedules = []
 loop do # rubocop:disable Metrics/BlockLength
   break if stack.empty?
   state, changed_schedule = stack.shift
+  lazy_update_trigger_info = false
   if state == :added
     schedule_spec = changed_schedule
     if schedule_spec[:after] && !schedule_cache[schedule_spec[:after]]
-
-
+      if has_cycle_trigger
+        # The schedule is triggered by another schedule
+        lazy_update_trigger_info = true
+      else
+        stack << [state, schedule_spec]
+        next
+      end
     end
+
     remote_process, process_spec = cache.find do |_remote, local, schedule|
       (schedule_spec[:process_id] == local.process_id) && (schedule.name == schedule_spec[:name])
     end
@@ -502,8 +601,21 @@ module GoodData
 if process_spec.type != :dataload
   executable = schedule_spec[:executable] || (process_spec.type == :ruby ? 'main.rb' : 'main.grf')
 end
+
 params = schedule_parameters(schedule_spec)
-
+
+if lazy_update_trigger_info
+  # Temporary update nil for trigger info. The trigger info will be update late after transfer all schedules
+  created_schedule = remote_process.create_schedule(nil, executable, params)
+  update_trigger_schedules << {
+    state: :added,
+    schedule: created_schedule,
+    after: schedule_spec[:after]
+  }
+else
+  created_schedule = remote_process.create_schedule(schedule_spec[:cron] || schedule_cache[schedule_spec[:after]], executable, params)
+end
+
 schedule_cache[created_schedule.name] = created_schedule
 
 results << {
@@ -514,8 +626,13 @@ module GoodData
 else
   schedule_spec = changed_schedule[:new_obj]
   if schedule_spec[:after] && !schedule_cache[schedule_spec[:after]]
-
-
+    if has_cycle_trigger
+      # The schedule is triggered by another schedule
+      lazy_update_trigger_info = true
+    else
+      stack << [state, schedule_spec]
+      next
+    end
   end
 
   remote_process, process_spec = cache.find do |i|
@@ -528,7 +645,12 @@ module GoodData
 
 schedule.params = (schedule_spec[:params] || {})
 schedule.cron = schedule_spec[:cron] if schedule_spec[:cron]
-
+
+unless lazy_update_trigger_info
+  schedule.after = schedule_cache[schedule_spec[:after]] if schedule_spec[:after]
+  schedule.trigger_execution_status = schedule_cache[schedule_spec[:trigger_execution_status]] if schedule_spec[:after]
+end
+
 schedule.hidden_params = schedule_spec[:hidden_params] || {}
 if process_spec.type != :dataload
   schedule.executable = schedule_spec[:executable] || (process_spec.type == :ruby ? 'main.rb' : 'main.grf')
@@ -540,6 +662,15 @@ module GoodData
 schedule.save
 schedule_cache[schedule.name] = schedule
 
+if lazy_update_trigger_info
+  update_trigger_schedules << {
+    state: :changed,
+    schedule: schedule,
+    after: schedule_spec[:after],
+    trigger_execution_status: schedule_spec[:trigger_execution_status]
+  }
+end
+
 results << {
   state: :changed,
   process: remote_process,
@@ -548,6 +679,22 @@ module GoodData
   end
 end
 
+if has_cycle_trigger
+  update_trigger_schedules.each do |update_trigger_schedule|
+    working_schedule = update_trigger_schedule[:schedule]
+    working_schedule.after = schedule_cache[update_trigger_schedule[:after]]
+    working_schedule.trigger_execution_status = schedule_cache[update_trigger_schedule[:trigger_execution_status]] if update_trigger_schedule[:state] == :changed
+
+    # Update trigger info
+    working_schedule.save
+
+    # Update transfer result
+    results.each do |transfer_result|
+      transfer_result[:schedule] = working_schedule if transfer_result[:schedule].obj_id == working_schedule.obj_id
+    end
+  end
+end
+
 diff[:removed].each do |removed_schedule|
   GoodData.logger.info("Removing schedule #{removed_schedule[:name]}")
 
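The hunks above turn transfer_schedules into a two-pass operation when has_cycle_trigger is set: schedules whose trigger target does not exist yet are first created or updated without trigger info and collected in update_trigger_schedules, and this final block wires up after and trigger_execution_status once every schedule exists. A sketch of the new call, with placeholder projects:

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    from_project = client.projects('master_pid')   # placeholder PIDs
    to_project   = client.projects('client_pid')

    # Pass true when the source schedules trigger each other in a cycle;
    # the default of false keeps the original single-pass behaviour.
    GoodData::Project.transfer_schedules(from_project, to_project, true)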
@@ -589,7 +736,8 @@ module GoodData
       hidden_params: schedule_spec[:hidden_params],
       name: schedule_spec[:name],
       reschedule: schedule_spec[:reschedule],
-      state: schedule_spec[:state]
+      state: schedule_spec[:state],
+      trigger_execution_status: schedule_spec[:trigger_execution_status]
     }
   end
 end
@@ -854,6 +1002,14 @@ module GoodData
   GoodData::Dashboard[id, project: self, client: client]
 end
 
+# Helper for getting analytical dashboards (KD dashboards) of a project
+#
+# @param id [String | Number | Object] Anything that you can pass to GoodData::Dashboard[id]
+# @return [GoodData::AnalyticalDashboard | Array<GoodData::AnalyticalDashboard>] dashboard instance or list
+def analytical_dashboards(id = :all)
+  GoodData::AnalyticalDashboard[id, project: self, client: client]
+end
+
 def data_permissions(id = :all)
   GoodData::MandatoryUserFilter[id, client: client, project: self]
 end
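analytical_dashboards mirrors the existing dashboards accessor but returns KD (KPI Dashboards) objects, and backs the new synchronize_kd_dashboard_permission action. A short usage sketch with placeholder identifiers:

    require 'gooddata'

    client  = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    project = client.projects('project_pid')

    project.analytical_dashboards.each { |kd| puts kd.title }      # all KD dashboards
    one_kd = project.analytical_dashboards('kd_identifier_or_uri') # or a single one, like dashboards(id)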
@@ -1780,15 +1936,15 @@ module GoodData
 end
 
 # reassign to groups
+removal_user_group_members = []
 mappings = new_users.map(&:to_hash).flat_map do |user|
+  removal_user_group_members << user[:login] if user[:user_group]&.empty?
  groups = user[:user_group] || []
   groups.map { |g| [user[:login], g] }
 end
+
 unless mappings.empty?
-  users_lookup =
-    a[e.login] = e
-    a
-  end
+  users_lookup = login_users
   mappings.group_by { |_, g| g }.each do |g, mapping|
     remote_users = mapping.map { |user, _| user }.map { |login| users_lookup[login] && users_lookup[login].uri }.reject(&:nil?)
     GoodData.logger.info("Assigning users #{remote_users} to group #{g}")
@@ -1802,14 +1958,42 @@ module GoodData
     end
     mentioned_groups = mappings.map(&:last).uniq
     groups_to_cleanup = user_groups_cache.reject { |g| mentioned_groups.include?(g.name) }
+
     # clean all groups not mentioned with exception of whitelisted users
     groups_to_cleanup.each do |g|
       g.set_members(whitelist_users(g.members.map(&:to_hash), [], options[:whitelists], :include).first.map { |x| x[:uri] })
     end
   end
+
+  remove_member_from_group(users_lookup, removal_user_group_members, user_groups_cache)
   GoodData::Helpers.join(results, diff_results, [:user], [:login_uri])
 end
 
+def remove_member_from_group(users_lookup, removal_user_group_members, user_groups_cache)
+  unless removal_user_group_members.empty?
+    users_lookup ||= login_users
+    current_user_groups = user_groups_cache || user_groups
+    removal_user_group_members.uniq.each do |login|
+      user_uri = users_lookup[login]&.uri
+
+      # remove user from group if exists as group member
+      current_user_groups.each do |user_group|
+        if user_group.member?(user_uri)
+          GoodData.logger.info("Removing #{user_uri} user from group #{user_group.name}")
+          user_group.remove_members(user_uri)
+        end
+      end
+    end
+  end
+end
+
+def login_users
+  users.reduce({}) do |a, e|
+    a[e.login] = e
+    a
+  end
+end
+
 def disable_users(list, options = {})
   list = list.map(&:to_hash)
   url = "#{uri}/users"
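With removal_user_group_members collected during the mapping pass, users whose user_group attribute is present but empty are now actively stripped from every group they currently belong to instead of being skipped. Illustrative input rows (field names from the diff, values invented):

    new_users = [
      { login: 'sales@example.com', user_group: ['Sales'] },   # (re)assigned to the "Sales" group
      { login: 'nogroups@example.com', user_group: [] },       # empty array: removed from every group it is a member of
      { login: 'unmanaged@example.com' }                       # no :user_group key: not added to the removal list
    ]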
@@ -2022,6 +2206,22 @@ module GoodData
   GoodData::Project.transfer_etl(client, self, target)
 end
 
+def dataset_mapping
+  GoodData::Project.get_dataset_mapping(self)
+end
+
+def update_dataset_mapping(model_mapping_json)
+  GoodData::Project.update_dataset_mapping(model_mapping_json, self)
+end
+
+def ldm_layout
+  GoodData::Project.get_ldm_layout(self)
+end
+
+def save_ldm_layout(ldm_layout_json)
+  GoodData::Project.save_ldm_layout(ldm_layout_json, self)
+end
+
 def transfer_processes(target)
   GoodData::Project.transfer_processes(self, target)
 end
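These instance wrappers round off the changes that appear to belong to data/lib/gooddata/models/project.rb (the +230/-30 entry in the file list) and give per-project sugar over the class helpers introduced earlier. A hedged sketch, assuming the fetched payloads round-trip into the update/save calls; PIDs are placeholders:

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    source = client.projects('master_pid')
    target = client.projects('client_pid')

    target.update_dataset_mapping(source.dataset_mapping)
    target.save_ldm_layout(source.ldm_layout)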
@@ -39,6 +39,9 @@ module GoodData
 opts = { client: GoodData.connection }.merge(opts)
 dry_run = opts[:dry_run]
 replacements = opts['maql_replacements'] || opts[:maql_replacements] || {}
+update_preference = opts[:update_preference]
+exist_fallback_to_hard_sync_config = !update_preference.nil? && !update_preference[:fallback_to_hard_sync].nil?
+include_maql_fallback_hard_sync = exist_fallback_to_hard_sync_config && GoodData::Helpers.to_boolean(update_preference[:fallback_to_hard_sync])
 
 _, project = GoodData.get_client_and_project(opts)
 
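The following hunks appear to come from data/lib/gooddata/models/project_creator.rb (+83/-6 in the file list). This first one reads a new :fallback_to_hard_sync flag out of :update_preference; a hedged sketch of how a caller might opt in, assuming the surrounding method is GoodData::Model::ProjectCreator.migrate_datasets and using placeholder objects:

    require 'gooddata'

    client  = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    project = client.projects('project_pid')

    GoodData::Model::ProjectCreator.migrate_datasets(
      project.blueprint,
      client: client,
      project: project,
      update_preference: {
        allow_cascade_drops: false,
        keep_data: true,
        fallback_to_hard_sync: true  # adds :includeMaqlFallbackHardSync to the maql_diff params (next hunk)
      }
    )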
@@ -48,6 +51,7 @@ module GoodData
 maql_diff_params = [:includeGrain]
 maql_diff_params << :excludeFactRule if opts[:exclude_fact_rule]
 maql_diff_params << :includeDeprecated if opts[:include_deprecated]
+maql_diff_params << :includeMaqlFallbackHardSync if include_maql_fallback_hard_sync
 
 maql_diff_time = Benchmark.realtime do
   response = project.maql_diff(blueprint: bp, params: maql_diff_params)
@@ -62,7 +66,7 @@ module GoodData
 ca_maql = response['projectModelDiff']['computedAttributesScript'] if response['projectModelDiff']['computedAttributesScript']
 ca_chunks = ca_maql && ca_maql['maqlDdlChunks']
 
-maqls = pick_correct_chunks(chunks, opts)
+maqls = include_maql_fallback_hard_sync ? pick_correct_chunks_hard_sync(chunks, opts) : pick_correct_chunks(chunks, opts)
 replaced_maqls = apply_replacements_on_maql(maqls, replacements)
 apply_maqls(ca_chunks, project, replaced_maqls, opts) unless dry_run
 [replaced_maqls, ca_maql]
@@ -72,9 +76,11 @@ module GoodData
 errors = []
 replaced_maqls.each do |replaced_maql_chunks|
   begin
+    fallback_hard_sync = replaced_maql_chunks['updateScript']['fallbackHardSync'].nil? ? false : replaced_maql_chunks['updateScript']['fallbackHardSync']
     replaced_maql_chunks['updateScript']['maqlDdlChunks'].each do |chunk|
       GoodData.logger.debug(chunk)
-      project.execute_maql(chunk)
+      execute_maql_result = project.execute_maql(chunk)
+      process_fallback_hard_sync_result(execute_maql_result, project) if fallback_hard_sync
     end
   rescue => e
     GoodData.logger.error("Error occured when executing MAQL, project: \"#{project.title}\" reason: \"#{e.message}\", chunks: #{replaced_maql_chunks.inspect}")
@@ -140,8 +146,8 @@ module GoodData
 preference = Hash[preference.map { |k, v| [k, GoodData::Helpers.to_boolean(v)] }]
 
 # will use new parameters instead of the old ones
-if preference.empty? || [
-  if [
+if preference.empty? || %i[allow_cascade_drops keep_data].any? { |k| preference.key?(k) }
+  if %i[cascade_drops preserve_data].any? { |k| preference.key?(k) }
     fail "Please do not mix old parameters (:cascade_drops, :preserve_data) with the new ones (:allow_cascade_drops, :keep_data)."
   end
   preference = { allow_cascade_drops: false, keep_data: true }.merge(preference)
@@ -174,8 +180,8 @@ module GoodData
 results_from_api = GoodData::Helpers.join(
   rules,
   stuff,
-  [
-  [
+  %i[cascade_drops preserve_data],
+  %i[cascade_drops preserve_data],
   inner: true
 ).sort_by { |l| l[:priority] } || []
 
@@ -204,6 +210,53 @@ module GoodData
   end
 end
 
+def pick_correct_chunks_hard_sync(chunks, opts = {})
+  preference = GoodData::Helpers.symbolize_keys(opts[:update_preference] || {})
+  preference = Hash[preference.map { |k, v| [k, GoodData::Helpers.to_boolean(v)] }]
+
+  # Old configure using cascade_drops and preserve_data parameters. New configure using allow_cascade_drops and
+  # keep_data parameters. Need translate from new configure to old configure before processing
+  if preference.empty? || %i[allow_cascade_drops keep_data].any? { |k| preference.key?(k) }
+    if %i[cascade_drops preserve_data].any? { |k| preference.key?(k) }
+      fail "Please do not mix old parameters (:cascade_drops, :preserve_data) with the new ones (:allow_cascade_drops, :keep_data)."
+    end
+
+    # Default allow_cascade_drops=false and keep_data=true
+    preference = { allow_cascade_drops: false, keep_data: true }.merge(preference)
+
+    new_preference = {}
+    new_preference[:cascade_drops] = preference[:allow_cascade_drops]
+    new_preference[:preserve_data] = preference[:keep_data]
+    preference = new_preference
+  end
+  preference[:fallback_to_hard_sync] = true
+
+  # Filter chunk with fallbackHardSync = true
+  result = chunks.select do |chunk|
+    chunk['updateScript']['maqlDdlChunks'] && !chunk['updateScript']['fallbackHardSync'].nil? && chunk['updateScript']['fallbackHardSync']
+  end
+
+  # The API model/diff only returns one result for MAQL fallback hard synchronize
+  result = pick_chunks_hard_sync(result[0], preference) if !result.nil? && !result.empty?
+
+  if result.nil? || result.empty?
+    available_chunks = chunks
+      .map do |chunk|
+        {
+          cascade_drops: chunk['updateScript']['cascadeDrops'],
+          preserve_data: chunk['updateScript']['preserveData'],
+          fallback_hard_sync: chunk['updateScript']['fallbackHardSync'].nil? ? false : chunk['updateScript']['fallbackHardSync']
+        }
+      end
+      .map(&:to_s)
+      .join(', ')
+
+    fail "Synchronize LDM cannot proceed. Adjust your update_preferences and try again. Available chunks with preference: #{available_chunks}"
+  end
+
+  result
+end
+
 private
 
 def apply_replacements_on_maql(maqls, replacements = {})
@@ -215,6 +268,30 @@ module GoodData
     end
   end
 end
+
+# Fallback hard synchronize although execute result success but some cases there are errors during executing.
+# In this cases, then export the errors to execution log as warning
+def process_fallback_hard_sync_result(result, project)
+  messages = result['wTaskStatus']['messages']
+  if !messages.nil? && messages.size.positive?
+    warning_message = GoodData::Helpers.interpolate_error_messages(messages)
+    log_message = "Project #{project.pid} failed to preserve data, truncated data of some datasets. MAQL diff execution messages: \"#{warning_message}\""
+    GoodData.logger.warn(log_message)
+  end
+end
+
+# In case fallback hard synchronize, then the API model/diff only returns one result with preserve_data is always false and
+# cascade_drops is true or false. So pick chunk for fallback hard synchronize, we will ignore the preserve_data parameter
+# and only check the cascade_drops parameter in preference.
+def pick_chunks_hard_sync(chunk, preference)
+  # Make sure default values for cascade_drops
+  working_preference = { cascade_drops: false }.merge(preference)
+  if working_preference[:cascade_drops] || chunk['updateScript']['cascadeDrops'] == working_preference[:cascade_drops]
+    [chunk]
+  else
+    []
+  end
+end
 end
 end
 end
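For reference, the single chunk the model/diff resource returns when the hard-sync fallback is requested looks roughly like this (field names taken from the diff; the MAQL statement is an invented example). pick_chunks_hard_sync ignores preserveData and keeps the chunk whenever the caller allows cascade drops or the chunk's cascadeDrops matches the preference:

    # Illustrative shape only; real values come from the API response.
    chunk = {
      'updateScript' => {
        'cascadeDrops'     => false,
        'preserveData'     => false,  # always false for the hard-sync fallback
        'fallbackHardSync' => true,
        'maqlDdlChunks'    => ['SYNCHRONIZE {dataset.example};']
      }
    }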
@@ -101,6 +101,7 @@ module GoodData
 
 schedule.name = options[:name]
 schedule.set_trigger(trigger)
+schedule.trigger_execution_status = options[:trigger_execution_status]
 schedule.params = default_opts[:params].merge(options[:params] || {})
 schedule.hidden_params = options[:hidden_params] || {}
 schedule.timezone = options[:timezone] || default_opts[:timezone]
@@ -468,6 +469,7 @@ module GoodData
 hidden_params: hidden_params,
 cron: cron,
 trigger_id: trigger_id,
+trigger_execution_status: trigger_execution_status,
 timezone: timezone,
 uri: uri,
 reschedule: reschedule,
@@ -486,6 +488,16 @@ module GoodData
   self
 end
 
+def trigger_execution_status
+  json['schedule']['triggerExecutionStatus']
+end
+
+def trigger_execution_status=(trigger_execution_status)
+  json['schedule']['triggerExecutionStatus'] = trigger_execution_status
+  @dirty = true
+  self # rubocop:disable Lint/Void
+end
+
 def name
   json['schedule']['name']
 end
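The hunks above and below appear to come from data/lib/gooddata/models/schedule.rb (+13/-1 in the file list): schedules now carry triggerExecutionStatus end to end, from the creation options through to_hash to the JSON sent on save. A short sketch with a placeholder status value:

    require 'gooddata'

    client   = GoodData.connect('user@example.com', 'secret', server: 'https://secure.gooddata.com')
    project  = client.projects('project_pid')
    schedule = project.schedules.first

    puts schedule.trigger_execution_status          # reads json['schedule']['triggerExecutionStatus']
    schedule.trigger_execution_status = 'success'   # placeholder value; marks the schedule dirty
    schedule.save                                   # serialized into the schedule JSON (next hunk)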
@@ -530,7 +542,7 @@ module GoodData
     'hiddenParams' => GoodData::Helpers.encode_hidden_params(hidden_params)
   }
 }
-
+res['schedule']['triggerExecutionStatus'] = trigger_execution_status if trigger_execution_status
 res['schedule']['reschedule'] = reschedule if reschedule
 
 res
@@ -257,7 +257,8 @@ module GoodData
 res = client.poll_on_code(res['asyncTask']['links']['poll'])
 failed_count = GoodData::Helpers.get_path(res, %w(clientProjectProvisioningResult failed count), 0)
 created_count = GoodData::Helpers.get_path(res, %w(clientProjectProvisioningResult created count), 0)
-return
+return [].to_enum if (failed_count + created_count).zero?
+
 Enumerator.new do |y|
   uri = GoodData::Helpers.get_path(res, %w(clientProjectProvisioningResult links details))
   loop do