superset 0.1.6 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +33 -0
  3. data/README.md +36 -144
  4. data/doc/duplicate_dashboards.md +2 -5
  5. data/doc/migrating_dashboards_across_environments.md +173 -0
  6. data/doc/publishing.md +39 -0
  7. data/doc/setting_up_personal_api_credentials.md +43 -7
  8. data/doc/usage.md +105 -0
  9. data/env.sample +1 -1
  10. data/lib/superset/base_put_request.rb +30 -0
  11. data/lib/superset/chart/create.rb +40 -0
  12. data/lib/superset/chart/duplicate.rb +75 -0
  13. data/lib/superset/chart/put.rb +18 -0
  14. data/lib/superset/chart/update_dataset.rb +1 -1
  15. data/lib/superset/client.rb +7 -1
  16. data/lib/superset/dashboard/bulk_delete_cascade.rb +1 -1
  17. data/lib/superset/dashboard/compare.rb +2 -2
  18. data/lib/superset/dashboard/datasets/list.rb +37 -9
  19. data/lib/superset/dashboard/embedded/get.rb +2 -2
  20. data/lib/superset/dashboard/export.rb +56 -5
  21. data/lib/superset/dashboard/get.rb +5 -0
  22. data/lib/superset/dashboard/import.rb +84 -0
  23. data/lib/superset/dashboard/list.rb +8 -4
  24. data/lib/superset/dashboard/warm_up_cache.rb +1 -1
  25. data/lib/superset/database/export.rb +119 -0
  26. data/lib/superset/database/list.rb +5 -2
  27. data/lib/superset/dataset/get.rb +10 -11
  28. data/lib/superset/dataset/list.rb +1 -1
  29. data/lib/superset/dataset/put.rb +18 -0
  30. data/lib/superset/dataset/update_schema.rb +4 -3
  31. data/lib/superset/file_utilities.rb +4 -3
  32. data/lib/superset/guest_token.rb +14 -7
  33. data/lib/superset/logger.rb +2 -2
  34. data/lib/superset/request.rb +7 -4
  35. data/lib/superset/services/dashboard_loader.rb +69 -0
  36. data/lib/superset/services/duplicate_dashboard.rb +14 -13
  37. data/lib/superset/services/import_dashboard_across_environment.rb +144 -0
  38. data/lib/superset/version.rb +1 -1
  39. metadata +15 -3
data/lib/superset/dashboard/list.rb

@@ -5,12 +5,14 @@
  module Superset
  module Dashboard
  class List < Superset::Request
- attr_reader :title_contains, :tags_equal, :ids_not_in
+ attr_reader :title_contains, :title_equals, :tags_equal, :ids_not_in, :include_filter_dataset_schemas

- def initialize(page_num: 0, title_contains: '', tags_equal: [], ids_not_in: [])
+ def initialize(page_num: 0, title_contains: '', title_equals: '', tags_equal: [], ids_not_in: [], include_filter_dataset_schemas: false)
  @title_contains = title_contains
+ @title_equals = title_equals
  @tags_equal = tags_equal
  @ids_not_in = ids_not_in
+ @include_filter_dataset_schemas = include_filter_dataset_schemas
  super(page_num: page_num)
  end

@@ -34,7 +36,7 @@ module Superset
  end

  def retrieve_schemas(id)
- { schemas: Datasets::List.new(id).schemas }
+ { schemas: Datasets::List.new(dashboard_id: id, include_filter_datasets: include_filter_dataset_schemas).schemas }
  rescue StandardError => e
  # within Superset, a bug exists around deleting dashboards failing and the corrupting datasets configs, so handle errored datasets gracefully
  # ref NEP-17532
@@ -42,7 +44,7 @@ module Superset
  end

  def retrieve_embedded_details(id)
- embedded_dashboard = Dashboard::Embedded::Get.new(id)
+ embedded_dashboard = Dashboard::Embedded::Get.new(dashboard_id: id)
  { allowed_embedded_domains: embedded_dashboard.allowed_domains,
  uuid: embedded_dashboard.uuid,}
  end
@@ -69,6 +71,7 @@ module Superset
  # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
  filter_set = []
  filter_set << "(col:dashboard_title,opr:ct,value:'#{title_contains}')" if title_contains.present?
+ filter_set << "(col:dashboard_title,opr:eq,value:'#{title_equals}')" if title_equals.present?
  filter_set << tag_filters if tags_equal.present?
  filter_set << ids_not_in_filters if ids_not_in.present?
  unless filter_set.empty?
@@ -90,6 +93,7 @@ module Superset

  def validate_constructor_args
  raise InvalidParameterError, "title_contains must be a String type" unless title_contains.is_a?(String)
+ raise InvalidParameterError, "title_equals must be a String type" unless title_equals.is_a?(String)
  raise InvalidParameterError, "tags_equal must be an Array type" unless tags_equal.is_a?(Array)
  raise InvalidParameterError, "tags_equal array must contain string only values" unless tags_equal.all? { |item| item.is_a?(String) }
  raise InvalidParameterError, "ids_not_in must be an Array type" unless ids_not_in.is_a?(Array)
data/lib/superset/dashboard/warm_up_cache.rb

@@ -35,7 +35,7 @@ module Superset
  end

  def fetch_dataset_details(dashboard_id)
- Superset::Dashboard::Datasets::List.new(dashboard_id).datasets_details.map { |dataset| dataset['database'].slice('name').merge(dataset.slice('datasource_name'))}
+ Superset::Dashboard::Datasets::List.new(dashboard_id: dashboard_id).datasets_details.map { |dataset| dataset['database'].slice('name').merge(dataset.slice('datasource_name'))}
  end
  end
  end
data/lib/superset/database/export.rb

@@ -0,0 +1,119 @@
+ # Will export the Database zip file to /tmp/superset_database_exports with the zip filename adjusted to include the database_id
+ # Example zipfile: database_#{database_id}_export_#{datestamp}.zip
+ #
+ # File will then be unzipped and all files copied into the destination_path with the database_id as a subfolder
+ # Optionally remove the dataset yaml files from the export
+ #
+ # Usage
+ # Superset::Database::Export.new(database_id: 1, destination_path: '/tmp/superset_database_exports/').perform
+
+ # Superset::Database::Export.new(database_id: 1, destination_path: '/tmp/superset_database_exports/', remove_dataset_yamls: true).perform
+ #
+
+ require 'superset/file_utilities'
+
+ module Superset
+ module Database
+ class Export < Request
+ include FileUtilities
+
+ TMP_SUPERSET_DATABASE_PATH = '/tmp/superset_database_exports'.freeze
+
+ attr_reader :database_id, :destination_path, :remove_dataset_yamls
+
+ def initialize(database_id: , destination_path: , remove_dataset_yamls: true)
+ @database_id = database_id
+ @destination_path = destination_path.chomp('/')
+ @remove_dataset_yamls = remove_dataset_yamls
+ end
+
+ def perform
+ create_tmp_dir
+ save_exported_zip_file
+ unzip_files
+ copy_export_files_to_destination_path
+
+ Dir.glob("#{destination_path_with_db_id}/databases/*")
+ end
+
+ def response
+ @response ||= client.call(
+ :get,
+ client.url(route),
+ client.param_check(params)
+ )
+ end
+
+ def exported_zip_path
+ @exported_zip_path ||= "#{tmp_uniq_database_path}/database_#{database_id}_export_#{datestamp}.zip"
+ end
+
+ private
+
+ def params
+ # The Swagger API interface indicates this endpoint should take an array of integers
+ # however this does not work within the Swagger interface or when testing the API
+ # Investigating the Superset GUI with Dev Tools shows that the format below is used
+
+ { "q": "!(#{database_id})" }
+ end
+
+ def save_exported_zip_file
+ File.open(exported_zip_path, 'wb') { |fp| fp.write(response.body) }
+ end
+
+ def unzip_files
+ @extracted_files = unzip_file(exported_zip_path, tmp_uniq_database_path)
+ remove_dataset_yaml_files if remove_dataset_yamls
+ end
+
+ def download_folder
+ File.dirname(extracted_files[0])
+ end
+
+ def destination_path_with_db_id
+ @destination_path_with_db_id ||= File.join(destination_path, database_id.to_s)
+ end
+
+ def copy_export_files_to_destination_path
+ FileUtils.mkdir_p(destination_path_with_db_id) unless File.directory?(destination_path_with_db_id)
+
+ Dir.glob("#{download_folder}/*").each do |item|
+ FileUtils.cp_r(item, destination_path_with_db_id)
+ end
+ end
+
+ def remove_dataset_yaml_files
+ datasets_directories = Dir.glob( File.join(tmp_uniq_database_path, '/*/datasets') )
+
+ datasets_directories.each do |directory|
+ FileUtils.rm_rf(directory) if Dir.exist?(directory)
+ end
+ end
+
+ def create_tmp_dir
+ FileUtils.mkdir_p(tmp_uniq_database_path) unless File.directory?(tmp_uniq_database_path)
+ end
+
+ def tmp_uniq_database_path
+ @tmp_uniq_database_path ||= File.join(TMP_SUPERSET_DATABASE_PATH, uuid)
+ end
+
+ def uuid
+ SecureRandom.uuid
+ end
+
+ def extracted_files
+ @extracted_files ||= []
+ end
+
+ def route
+ "database/export/"
+ end
+
+ def datestamp
+ @datestamp ||= Time.now.strftime('%Y%m%d')
+ end
+ end
+ end
+ end
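
A hedged usage sketch for the new export class; the id and path are illustrative, and perform returns the paths of the exported database yaml files copied under destination_path/<database_id>:

  # Export database connection 6, excluding dataset yamls (the default behaviour)
  files = Superset::Database::Export.new(
    database_id: 6,
    destination_path: '/tmp/superset_database_exports/'
  ).perform
  # files is an array like ['/tmp/superset_database_exports/6/databases/<database_name>.yaml']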
data/lib/superset/database/list.rb

@@ -4,10 +4,11 @@
  module Superset
  module Database
  class List < Superset::Request
- attr_reader :title_contains
+ attr_reader :title_contains, :uuid_equals

- def initialize(page_num: 0, title_contains: '')
+ def initialize(page_num: 0, title_contains: '', uuid_equals: '')
  @title_contains = title_contains
+ @uuid_equals = uuid_equals
  super(page_num: page_num)
  end

@@ -34,6 +35,7 @@ module Superset
  # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
  filter_set = []
  filter_set << "(col:database_name,opr:ct,value:'#{title_contains}')" if title_contains.present?
+ filter_set << "(col:uuid,opr:eq,value:'#{uuid_equals}')" if uuid_equals.present?
  unless filter_set.empty?
  "filters:!(" + filter_set.join(',') + "),"
  end
@@ -45,6 +47,7 @@ module Superset

  def validate_constructor_args
  raise InvalidParameterError, "title_contains must be a String type" unless title_contains.is_a?(String)
+ raise InvalidParameterError, "uuid_equals must be a String type" unless uuid_equals.is_a?(String)
  end
  end
  end
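
The uuid filter is handy when matching a database connection exported from another environment; a minimal sketch (the uuid is a placeholder):

  # Look up the local database connection matching a uuid taken from an exported database yaml
  Superset::Database::List.new(uuid_equals: 'uuid-from-database-yaml').result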
data/lib/superset/dataset/get.rb

@@ -29,17 +29,6 @@ module Superset
  result['name']
  end

- private
-
- def route
- "dataset/#{id}"
- end
-
- def display_headers
- %w[title schema database_name, database_id]
- end
-
-
  def database_name
  result['database']['database_name']
  end
@@ -51,6 +40,16 @@ module Superset
  def sql
  ['sql']
  end
+
+ private
+
+ def route
+ "dataset/#{id}"
+ end
+
+ def display_headers
+ %w[title schema database_name, database_id]
+ end
  end
  end
  end
data/lib/superset/dataset/list.rb

@@ -34,7 +34,7 @@ module Superset
  end

  def list_attributes
- ['id', 'table_name', 'schema', 'changed_by_name']
+ ['id', 'table_name', 'database', 'schema', 'changed_by_name']
  end
  end
  end
data/lib/superset/dataset/put.rb

@@ -0,0 +1,18 @@
+ # Updates a dataset in Superset with the given params
+ #
+ # Usage:
+ # params = { owners: [ 58, 3 ] }
+ # Superset::Dataset::Put.new(object_id: 101, params: params ).perform
+
+ module Superset
+ module Dataset
+ class Put < Superset::BasePutRequest
+
+ private
+
+ def route
+ "dataset/#{object_id}"
+ end
+ end
+ end
+ end
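
A hedged sketch of the new Put request; object_id and the payload are illustrative, and the params hash is passed straight through to Superset's dataset PUT endpoint:

  # Reassign owners and update the description of dataset 101 (illustrative values)
  params = { owners: [12], description: 'Curated reporting dataset' }
  Superset::Dataset::Put.new(object_id: 101, params: params).perform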
data/lib/superset/dataset/update_schema.rb

@@ -68,9 +68,10 @@ module Superset

  def validate_proposed_changes
  logger.info " Validating Dataset ID: #{source_dataset_id} schema update to #{target_schema} on Database: #{target_database_id}"
- raise "Error: source_dataset_id integer is required" unless source_dataset_id.present? && source_dataset_id.is_a?(Integer)
- raise "Error: target_database_id integer is required" unless target_database_id.present? && target_database_id.is_a?(Integer)
- raise "Error: target_schema string is required" unless target_schema.present? && target_schema.is_a?(String)
+ raise "Error: source_dataset_id integer is required" unless source_dataset_id.present? && source_dataset_id.is_a?(Integer)
+ raise "Error: target_database_id integer is required" unless target_database_id.present? && target_database_id.is_a?(Integer)
+ raise "Error: target_schema string is required" unless target_schema.present? && target_schema.is_a?(String)
+ raise "Error: schema must be set on the source dataset" unless source_dataset['schema'].present? # required for validating sql_query_includes_hard_coded_schema

  # confirm the dataset exist? ... no need as the load_source_dataset method will raise an error if the dataset does not exist

data/lib/superset/file_utilities.rb

@@ -9,11 +9,12 @@ module Superset
  entry_path = File.join(destination, entry.name)
  entries << entry_path
  FileUtils.mkdir_p(File.dirname(entry_path))
- zip.extract(entry, entry_path)
+
+ zip.extract(entry, entry_path) unless File.exist?(entry_path)
  end
  end
- puts entries
- entries # return array of extracted files
+
+ entries
  end
  end
  end
data/lib/superset/guest_token.rb

@@ -2,14 +2,16 @@ module Superset
  class GuestToken
  include Credential::EmbeddedUser

- attr_accessor :embedded_dashboard_id, :current_user
+ attr_accessor :embedded_dashboard_id, :rls_clause, :additional_params

- def initialize(embedded_dashboard_id: , current_user: nil)
+ def initialize(embedded_dashboard_id:, rls_clause: [], **additional_params)
  @embedded_dashboard_id = embedded_dashboard_id
- @current_user = current_user
+ @rls_clause = rls_clause
+ @additional_params = additional_params
  end

  def guest_token
+ validate_params
  response_body['token']
  end

@@ -20,18 +22,23 @@ module Superset
  "id": embedded_dashboard_id.to_s,
  "type": "dashboard" }
  ],
- "rls": [],
+ "rls": rls_clause, # Ex: [{ "clause": "publisher = 'Nintendo'" }]
  "user": current_user_params
- }
+ }.merge(additional_params)
  end

  private

+ def validate_params
+ raise Superset::Request::InvalidParameterError, "rls_clause should be an array. But it is #{rls_clause.class}" if rls_clause.nil? || rls_clause.class != Array
+ end
+
  # optional param to be available in Superset for query templating using jinja
  # ss expects username .. which could be used to query as current_user.id
  def current_user_params
- if current_user
- { "username": current_user.id.to_s }
+ current_user_id = additional_params[:embedded_app_current_user_id]
+ if current_user_id
+ { "username": current_user_id.to_s }
  else
  { }
  end
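
A hedged sketch of generating a guest token with the new row-level-security clause and the catch-all keyword params (ids and clause are illustrative):

  # Token for an embedded dashboard, scoped to a single publisher via RLS
  Superset::GuestToken.new(
    embedded_dashboard_id: '15',
    rls_clause: [{ clause: "publisher = 'Nintendo'" }],
    embedded_app_current_user_id: 42   # passed through to Superset as the guest username
  ).guest_token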
data/lib/superset/logger.rb

@@ -1,6 +1,6 @@
  module Superset
  class Logger
-
+
  def info(msg)
  # puts msg # allow logs to console
  logger.info msg
@@ -17,4 +17,4 @@ module Superset
  end
  end
  end
- end
+ end
data/lib/superset/request.rb

@@ -5,7 +5,6 @@ module Superset
  class InvalidParameterError < StandardError; end
  class ValidationError < StandardError; end

-
  PAGE_SIZE = 100

  attr_accessor :page_num
@@ -43,8 +42,12 @@ module Superset
  raise NotImplementedError.new("You must implement route.")
  end

- def client
- @client ||= Superset::Client.new
+ def client(use_json: true)
+ @client ||= begin
+ c = Superset::Client.new
+ c.config.use_json = use_json
+ c
+ end
  end

  def pagination
@@ -59,4 +62,4 @@ module Superset
  @logger ||= Superset::Logger.new
  end
  end
- end
+ end
data/lib/superset/services/dashboard_loader.rb

@@ -0,0 +1,69 @@
+ # Given a path, load all yaml files
+
+ require 'superset/file_utilities'
+ require 'yaml'
+
+ module Superset
+ module Services
+ class DashboardLoader
+ include FileUtilities
+
+ TMP_PATH = '/tmp/superset_dashboard_imports'.freeze
+
+ attr_reader :dashboard_export_zip
+
+ def initialize(dashboard_export_zip:)
+ @dashboard_export_zip = dashboard_export_zip
+ end
+
+ def perform
+ unzip_source_file
+ dashboard_config
+ end
+
+ def dashboard_config
+ @dashboard_config ||= DashboardConfig.new(
+ dashboard_export_zip: dashboard_export_zip,
+ tmp_uniq_dashboard_path: tmp_uniq_dashboard_path).config
+ end
+
+ private
+
+ def unzip_source_file
+ @extracted_files = unzip_file(dashboard_export_zip, tmp_uniq_dashboard_path)
+ end
+
+ def tmp_uniq_dashboard_path
+ @tmp_uniq_dashboard_path ||= File.join(TMP_PATH, uuid)
+ end
+
+ def uuid
+ SecureRandom.uuid
+ end
+
+ class DashboardConfig < ::OpenStruct
+ def config
+ {
+ tmp_uniq_dashboard_path: tmp_uniq_dashboard_path,
+ dashboards: load_yamls_for('dashboards'),
+ databases: load_yamls_for('databases'),
+ datasets: load_yamls_for('datasets'),
+ charts: load_yamls_for('charts'),
+ metadata: load_yamls_for('metadata.yaml', pattern_sufix: nil),
+ }
+ end
+
+ def load_yamls_for(object_path, pattern_sufix: '**/*.yaml')
+ pattern = File.join([tmp_uniq_dashboard_path, '**', object_path, pattern_sufix].compact)
+ Dir.glob(pattern).map do |file|
+ { filename: file, content: load_yaml_and_symbolize_keys(file) } if File.file?(file)
+ end.compact
+ end
+
+ def load_yaml_and_symbolize_keys(path)
+ YAML.load_file(path).deep_symbolize_keys
+ end
+ end
+ end
+ end
+ end
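
A hedged sketch of what the loader returns; the keys mirror the config hash built by DashboardConfig above (the zip path is illustrative):

  # Unzip a dashboard export and read its yaml files as symbolized hashes
  config = Superset::Services::DashboardLoader.new(dashboard_export_zip: '/tmp/dashboard.zip').perform
  config[:dashboards].first[:filename]                 # path to the dashboard yaml in the tmp working dir
  config[:datasets].map { |d| d[:content][:schema] }   # schemas referenced by the exported datasets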
data/lib/superset/services/duplicate_dashboard.rb

@@ -93,13 +93,18 @@ module Superset
  # duplicate the dataset, renaming to use of suffix as the target_schema
  # reason: there is a bug(or feature) in the SS API where a dataset name must be uniq when duplicating.
  # (note however renaming in the GUI to a dup name works fine)
- new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: "#{dataset[:datasource_name]}-#{target_schema}").perform
-
+ new_dataset_name = "#{dataset[:datasource_name]}-#{target_schema}"
+ existing_datasets = Superset::Dataset::List.new(title_equals: new_dataset_name, schema_equals: target_schema).result
+ if existing_datasets.any?
+ logger.info "Dataset #{existing_datasets[0]["table_name"]} already exists. Reusing it"
+ new_dataset_id = existing_datasets[0]["id"] # assuming that we do not name multiple datasets with same name in a single schema
+ else
+ new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: new_dataset_name).perform
+ # update the new dataset with the target schema and target database
+ Superset::Dataset::UpdateSchema.new(source_dataset_id: new_dataset_id, target_database_id: target_database_id, target_schema: target_schema).perform
+ end
  # keep track of the previous dataset and the matching new dataset_id
  dataset_duplication_tracker << { source_dataset_id: dataset[:id], new_dataset_id: new_dataset_id }
-
- # update the new dataset with the target schema and target database
- Superset::Dataset::UpdateSchema.new(source_dataset_id: new_dataset_id, target_database_id: target_database_id, target_schema: target_schema).perform
  end
  end

@@ -179,7 +184,7 @@ module Superset

  # retrieve the datasets that will be duplicated
  def source_dashboard_datasets
- @source_dashboard_datasets ||= Superset::Dashboard::Datasets::List.new(source_dashboard_id).datasets_details
+ @source_dashboard_datasets ||= Superset::Dashboard::Datasets::List.new(dashboard_id: source_dashboard_id, include_filter_datasets: true).datasets_details
  rescue => e
  raise "Unable to retrieve datasets for source dashboard #{source_dashboard_id}: #{e.message}"
  end
@@ -199,7 +204,7 @@ module Superset
  raise ValidationError, "The source dashboard datasets are required to point to one schema only. Actual schema list is #{source_dashboard_schemas.join(',')}" if source_dashboard_has_more_than_one_schema?
  raise ValidationError, "One or more source dashboard filters point to a different schema than the dashboard charts. Identified Unpermittied Filter Dataset Ids are #{unpermitted_filter_dataset_ids.to_s}" if unpermitted_filter_dataset_ids.any?

- # new dataset validations
+ # new dataset validations - Need to be commented for EU dashboard duplication as we are using the existing datasets for the new dashboard
  raise ValidationError, "DATASET NAME CONFLICT: The Target Schema #{target_schema} already has existing datasets named: #{target_schema_matching_dataset_names.join(',')}" unless target_schema_matching_dataset_names.empty?
  validate_source_dashboard_datasets_sql_does_not_hard_code_schema

@@ -241,7 +246,7 @@ module Superset
  # here we will need to decide if we want to use the existing dataset or not see NEP-????
  # for now we will exit with an error if we find any existing datasets of the same name
  def target_schema_matching_dataset_names
- source_dashboard_dataset_names.map do |source_dataset_name|
+ @target_schema_matching_dataset_names ||= source_dashboard_dataset_names.map do |source_dataset_name|
  existing_names = Superset::Dataset::List.new(title_contains: source_dataset_name, schema_equals: target_schema).result.map{|t|t['table_name']}.uniq # contains match to cover with suffix as well
  unless existing_names.flatten.empty?
  logger.error " HALTING PROCESS: Schema #{target_schema} already has Dataset called #{existing_names}"
@@ -255,11 +260,7 @@ module Superset
  end

  def source_dashboard_filter_dataset_ids
- filters_configuration = JSON.parse(source_dashboard.result['json_metadata'])['native_filter_configuration'] || []
- return Array.new unless filters_configuration && filters_configuration.any?
-
- # pull only the filters dataset ids from the dashboard
- filters_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact
+ @filter_dataset_ids ||= source_dashboard.filter_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact.uniq
  end

  # Primary Assumption is that all charts datasets on the source dashboard are pointing to the same database schema
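
For context, a hedged sketch of invoking the duplication service whose internals changed above (ids and schema are illustrative; the keyword names follow the attributes referenced in this diff):

  # Duplicate a dashboard into another schema, now reusing any target datasets that already match by name
  Superset::Services::DuplicateDashboard.new(
    source_dashboard_id: 11,
    target_schema: 'acme',
    target_database_id: 6
  ).perform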
data/lib/superset/services/import_dashboard_across_environment.rb

@@ -0,0 +1,144 @@
+ =begin
+ This service is used to duplicate a dashboard from one environment to another.
+ It will not create any database connections from an imported dashboard zip, therefore the target_database_yaml_file configuration
+ must already exist as a database connection in the target superset environment.
+
+ Currently handles only 1 Database yaml file in the zip file. ( ie only 1 common database connection per dashboards datasets )
+
+ Required Attributes:
+ - target_database_yaml_file - location of the target database yaml config file
+ - target_database_schema - the schema name to be used in the target database
+ - dashboard_export_zip - location of the source dashboard export zip file to be transferred to a new superset Env
+
+ Usage:
+ Assuming you have exported a dashboard from the source environment and have the zip file, and have exported the target database yaml file
+
+ Superset::Services::ImportDashboardAcrossEnvironments.new(
+ target_database_yaml_file: '/tmp/database.yaml',
+ target_database_schema: 'insert_schema_here',
+ dashboard_export_zip: '/tmp/dashboard.zip'
+ ).perform
+
+ =end
+
+ require 'superset/file_utilities'
+ require 'yaml'
+
+ module Superset
+ module Services
+ class ImportDashboardAcrossEnvironments
+ include FileUtilities
+
+ def initialize(target_database_yaml_file:, target_database_schema: ,dashboard_export_zip:)
+ @target_database_yaml_file = target_database_yaml_file
+ @target_database_schema = target_database_schema
+ @dashboard_export_zip = dashboard_export_zip
+ end
+
+ def perform
+ validate_params
+
+ remove_source_database_config
+ insert_target_database_file
+ insert_target_database_config
+ update_dataset_configs
+
+ create_new_dashboard_zip
+ end
+
+ def dashboard_config
+ @dashboard_config ||= Superset::Services::DashboardLoader.new(dashboard_export_zip: dashboard_export_zip).perform
+ end
+
+ private
+
+ attr_reader :target_database_yaml_file, :target_database_schema, :dashboard_export_zip
+
+ def remove_source_database_config
+ return if dashboard_config[:databases].blank?
+ previous_database_name = dashboard_config[:databases]&.first[:content][:database_name]
+ File.delete(dashboard_config[:databases].first[:filename])
+
+ dashboard_config[:databases].clear
+ end
+
+ def insert_target_database_file
+ FileUtils.cp(target_database_yaml_file, File.join(dashboard_export_root_path, 'databases'))
+
+ pattern = File.join(dashboard_export_root_path, 'databases', '*.yaml')
+ @new_database_yaml_file_path = Dir.glob(pattern).first
+ end
+
+ def insert_target_database_config
+ yaml_content = YAML.load_file(target_database_yaml_file).deep_symbolize_keys
+ dashboard_config[:databases] << { filename: new_database_yaml_file_path, content: yaml_content }
+ end
+
+ def update_dataset_configs
+ dashboard_config[:datasets].each do |dataset|
+ dataset[:content][:database_uuid] = dashboard_config[:databases].first[:content][:uuid]
+ dataset[:content][:schema] = target_database_schema
+ stringified_content = deep_transform_keys_to_strings(dataset[:content])
+ File.open(dataset[:filename], 'w') { |f| f.write stringified_content.to_yaml }
+ end
+ end
+
+ def create_new_dashboard_zip
+ Zip::File.open(new_zip_file, Zip::File::CREATE) do |zipfile|
+ Dir[File.join(dashboard_export_root_path, '**', '**')].each do |file|
+ zipfile.add(file.sub(dashboard_export_root_path + '/', File.basename(dashboard_export_root_path) + '/' ), file) if File.file?(file)
+ end
+ end
+ new_zip_file
+ end
+
+ def new_zip_file
+ new_database_name = dashboard_config[:databases].first[:content][:database_name]
+ File.join(dashboard_config[:tmp_uniq_dashboard_path], "dashboard_import_for_#{new_database_name}.zip")
+ end
+
+ def new_database_yaml_file_path
+ @new_database_yaml_file_path ||= ''
+ end
+
+ def dashboard_export_root_path
+ # locate the unzipped dashboard_export_* directory as named by superset app, eg dashboard_export_20240821T001536
+ @dashboard_export_root_path ||= begin
+ pattern = File.join(dashboard_config[:tmp_uniq_dashboard_path], 'dashboard_export_*')
+ Dir.glob(pattern).first
+ end
+
+ end
+
+ def new_database_name
+ dashboard_config[:databases].first[:content][:database_name]
+ end
+
+ def previous_database_name
+ @previous_database_name ||= ''
+ end
+
+ def validate_params
+ raise "Dashboard Export Zip file does not exist" unless File.exist?(dashboard_export_zip)
+ raise "Dashboard Export Zip file is not a zip file" unless File.extname(dashboard_export_zip) == '.zip'
+ raise "Target Database YAML file does not exist" unless File.exist?(target_database_yaml_file)
+ raise "Currently this class handles boards with single Database configs only. Multiple Database configs exist in zip file." if dashboard_config[:databases].size > 1
+ raise "Target Database Schema cannot be blank" if target_database_schema.blank?
+ end
+
+ # Method to recursively transform keys to strings
+ def deep_transform_keys_to_strings(value)
+ case value
+ when Hash
+ value.each_with_object({}) do |(k, v), result|
+ result[k.to_s] = deep_transform_keys_to_strings(v)
+ end
+ when Array
+ value.map { |v| deep_transform_keys_to_strings(v) }
+ else
+ value
+ end
+ end
+ end
+ end
+ end
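
Taken together with Database::Export above, a hedged end-to-end sketch of preparing a dashboard zip for another environment (paths and ids are illustrative; the final upload is left to the new Superset::Dashboard::Import class, whose interface is not shown here):

  # 1. Export the target environment's database connection yaml
  db_files = Superset::Database::Export.new(database_id: 6, destination_path: '/tmp/db_exports/').perform

  # 2. Rewrite a previously exported dashboard zip so its datasets point at the target database and schema
  new_zip = Superset::Services::ImportDashboardAcrossEnvironments.new(
    target_database_yaml_file: db_files.first,
    target_database_schema: 'staging',
    dashboard_export_zip: '/tmp/dashboard.zip'
  ).perform
  # new_zip is a dashboard_import_for_<database_name>.zip ready to be imported into the target environment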