superset 0.2.6 → 0.3.0
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +1 -1
- data/.ruby-version +1 -1
- data/CHANGELOG.md +16 -0
- data/Dockerfile +2 -2
- data/README.md +6 -3
- data/doc/duplicate_dashboards.md +45 -6
- data/lib/superset/base_put_request.rb +4 -4
- data/lib/superset/chart/bulk_delete.rb +1 -1
- data/lib/superset/chart/delete.rb +1 -1
- data/lib/superset/chart/get.rb +7 -11
- data/lib/superset/chart/list.rb +5 -5
- data/lib/superset/chart/put.rb +2 -2
- data/lib/superset/dashboard/bulk_delete.rb +1 -1
- data/lib/superset/dashboard/cascade_ownership/add_new_owner.rb +54 -0
- data/lib/superset/dashboard/datasets/list.rb +15 -15
- data/lib/superset/dashboard/delete.rb +1 -1
- data/lib/superset/dashboard/list.rb +35 -9
- data/lib/superset/dashboard/put.rb +7 -24
- data/lib/superset/dashboard/warm_up_cache.rb +1 -1
- data/lib/superset/database/get_catalogs.rb +38 -0
- data/lib/superset/database/list.rb +2 -2
- data/lib/superset/dataset/bulk_delete.rb +1 -1
- data/lib/superset/dataset/delete.rb +1 -1
- data/lib/superset/dataset/get.rb +8 -4
- data/lib/superset/dataset/list.rb +2 -2
- data/lib/superset/dataset/put.rb +2 -2
- data/lib/superset/dataset/update_schema.rb +4 -2
- data/lib/superset/display.rb +19 -10
- data/lib/superset/request.rb +6 -4
- data/lib/superset/security/permissions_resources/list.rb +2 -2
- data/lib/superset/security/user/list.rb +2 -2
- data/lib/superset/services/duplicate_dashboard.rb +72 -26
- data/lib/superset/services/import_dashboard_across_environment.rb +3 -0
- data/lib/superset/tag/add_to_object.rb +5 -5
- data/lib/superset/tag/list.rb +1 -1
- data/lib/superset/version.rb +1 -1
- data/superset.gemspec +12 -13
- metadata +63 -65
data/lib/superset/dataset/list.rb
CHANGED

@@ -3,12 +3,12 @@ module Superset
     class List < Superset::Request
       attr_reader :title_contains, :title_equals, :schema_equals, :database_id_eq

-      def initialize(
+      def initialize(title_contains: '', title_equals: '', schema_equals: '', database_id_eq: '', **kwargs)
         @title_contains = title_contains
         @title_equals = title_equals
         @schema_equals = schema_equals
         @database_id_eq = database_id_eq
-        super(
+        super(**kwargs)
       end

       def self.call
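With the initializer now accepting **kwargs and forwarding them to Superset::Request, pagination options pass straight through the list classes. A minimal sketch (filter and id values below are placeholders):

    Superset::Dataset::List.new(
      schema_equals: 'acme',   # placeholder schema name
      database_id_eq: 6,       # placeholder database id
      page_size: 200           # forwarded to Superset::Request via **kwargs
    ).result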
data/lib/superset/dataset/put.rb
CHANGED
@@ -2,7 +2,7 @@
 #
 # Usage:
 # params = { owners: [ 58, 3 ] }
-# Superset::Dataset::Put.new(
+# Superset::Dataset::Put.new(target_id: 101, params: params ).perform

 module Superset
   module Dataset
@@ -11,7 +11,7 @@ module Superset
       private

       def route
-        "dataset/#{
+        "dataset/#{target_id}"
       end
     end
   end
 end
data/lib/superset/dataset/update_schema.rb
CHANGED

@@ -2,12 +2,13 @@ module Superset
   module Dataset
     class UpdateSchema < Superset::Request

-      attr_reader :source_dataset_id, :target_database_id, :target_schema, :remove_copy_suffix
+      attr_reader :source_dataset_id, :target_database_id, :target_schema, :target_catalog, :remove_copy_suffix

-      def initialize(source_dataset_id: , target_database_id: , target_schema: , remove_copy_suffix: false)
+      def initialize(source_dataset_id: , target_database_id: , target_schema: , target_catalog: nil, remove_copy_suffix: false)
         @source_dataset_id = source_dataset_id
         @target_database_id = target_database_id
         @target_schema = target_schema
+        @target_catalog = target_catalog
         @remove_copy_suffix = remove_copy_suffix
       end

@@ -37,6 +38,7 @@ module Superset

         # primary database and schema changes
         new_params.merge!("database_id": target_database_id) # add the target database id
+        new_params['catalog'] = target_catalog
         new_params['schema'] = target_schema
         new_params['owners'] = new_params['owners'].map {|o| o['id'] } # expects an array of user ids
         new_params['table_name'] = new_params['table_name'].gsub(/ \(COPY\)/, '') if remove_copy_suffix
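A usage sketch for the new target_catalog keyword (all ids and names below are placeholders):

    Superset::Dataset::UpdateSchema.new(
      source_dataset_id: 101,          # placeholder dataset id
      target_database_id: 6,           # placeholder database id
      target_schema: 'acme',
      target_catalog: 'analytics',     # new in 0.3.0; defaults to nil
      remove_copy_suffix: true
    ).perform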
data/lib/superset/display.rb
CHANGED
@@ -7,28 +7,37 @@ module Superset
     def table
       Terminal::Table.new(
         title: title,
-        headings:
+        headings: list_attributes.map(&:to_s).map(&:humanize),
         rows: rows
       )
     end

     def rows
-      result.
-      list_attributes.map { |la|
+      if result.is_a?(Hash)
+        list_attributes.map { |la| result[la].to_s }
+      else
+        result.map do |d|
+          list_attributes.map { |la| d[la].to_s }
+        end
       end
     end

-    def
-
+    def to_h
+      if result.is_a?(Hash)
+        list_attributes.to_h { |la| [la, result[la]] }
+      else
+        result.map do |d|
+          list_attributes.to_h { |la| [la, d[la]] }
+        end
+      end
     end

-    def
-
-      headings.map(&:to_s).map(&:humanize)
+    def ids
+      result.map { |d| d[:id] }
     end

-    def
-
+    def title
+      self.class.to_s
     end

     def list_attributes
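An illustrative use of the reworked Display helpers, which now handle both a single-record Hash result and an Array of records; the list class and output shapes below are assumptions for illustration:

    list = Superset::Dashboard::List.new(title_contains: 'sales')
    list.table   # Terminal::Table with humanized headings from list_attributes
    list.to_h    # e.g. [{ id: 1, dashboard_title: 'Sales', ... }]
    list.ids     # e.g. [1, 2]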
data/lib/superset/request.rb
CHANGED
@@ -5,12 +5,13 @@ module Superset
     class InvalidParameterError < StandardError; end
     class ValidationError < StandardError; end

-
+    DEFAULT_PAGE_SIZE = 100

-    attr_accessor :page_num
+    attr_accessor :page_num, :page_size

-    def initialize(page_num: 0)
+    def initialize(page_num: 0, page_size: nil)
       @page_num = page_num
+      @page_size = page_size || DEFAULT_PAGE_SIZE
     end

     def self.call
@@ -51,7 +52,8 @@ module Superset
     end

     def pagination
-      "
+      raise InvalidParameterError, "page_size max is 1000 records" if page_size.to_i > 1000
+      "page:#{page_num},page_size:#{page_size}"
     end

     def filters
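A sketch of the new pagination controls; the list class is a placeholder, and the cap is assumed to surface when the request builds its pagination fragment:

    Superset::Dashboard::List.new(page_num: 0, page_size: 500).result   # 500 records per page
    Superset::Dashboard::List.new(page_size: 5000).result               # raises InvalidParameterError: page_size max is 1000 records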
data/lib/superset/security/user/list.rb
CHANGED

@@ -4,10 +4,10 @@ module Superset
     class List < Superset::Request
       attr_reader :email_contains, :username_equals

-      def initialize(
+      def initialize(email_contains: '', username_equals: '', **kwargs)
         @email_contains = email_contains
         @username_equals = username_equals
-        super(
+        super(**kwargs)
       end

       private
data/lib/superset/services/duplicate_dashboard.rb
CHANGED

@@ -9,15 +9,17 @@ module Superset
   module Services
     class DuplicateDashboard < Superset::Request

-      attr_reader :source_dashboard_id, :target_schema, :target_database_id, :allowed_domains, :tags, :publish
+      attr_reader :source_dashboard_id, :target_schema, :target_database_id, :target_dataset_suffix_override, :allowed_domains, :tags, :publish, :target_catalog_name

-      def initialize(source_dashboard_id:, target_schema:, target_database_id: , allowed_domains: [], tags: [], publish: false)
+      def initialize(source_dashboard_id:, target_schema:, target_database_id: , target_dataset_suffix_override: nil, allowed_domains: [], tags: [], publish: false, target_catalog_name: nil)
         @source_dashboard_id = source_dashboard_id
         @target_schema = target_schema
         @target_database_id = target_database_id
+        @target_dataset_suffix_override = target_dataset_suffix_override
         @allowed_domains = allowed_domains
         @tags = tags
         @publish = publish
+        @target_catalog_name = target_catalog_name
       end

       def perform
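The expanded entry point in use — every id and name below is a placeholder:

    Superset::Services::DuplicateDashboard.new(
      source_dashboard_id: 1,
      target_schema: 'acme',
      target_database_id: 6,
      target_dataset_suffix_override: 'ClientX',  # optional; dataset names default to a schema-name suffix
      target_catalog_name: 'analytics',           # required when the target database has multiple catalogs
      tags: ['embedded', 'client-acme'],
      publish: true
    ).perform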
@@ -55,6 +57,11 @@ module Superset

     rescue => e
       logger.error("#{e.message}")
+      remove_duplicated_objects
+      puts "------------------------------------------------------------------------------\n"
+      puts "DUPLICATE DASHBOARD FAILED - ERROR: #{e.message}\n"
+      puts "REMOVED DUPLICATED OBJECTS - Check log/superset-client.log for more details\n"
+      puts "------------------------------------------------------------------------------\n"
       raise e
     end

@@ -67,12 +74,12 @@ module Superset
     def add_tags_to_new_dashboard
       return unless tags.present?

-      Superset::Tag::AddToObject.new(object_type_id: ObjectType::DASHBOARD,
+      Superset::Tag::AddToObject.new(object_type_id: ObjectType::DASHBOARD, target_id: new_dashboard.id, tags: tags).perform
       logger.info " Added tags to dashboard #{new_dashboard.id}: #{tags}"
     rescue => e
       # catch the tag error and display it in the log, while allowing the process to finish, as a tag error is fairly insignificant
-      logger.error(" FAILED to add tags to new dashboard id: #{new_dashboard.id}. Error
-
+      logger.error(" FAILED to add tags to new dashboard id: #{new_dashboard.id}. Error: #{e.message}")
+      raise ValidationError, "Failed to add tags to new dashboard id: #{new_dashboard.id}. #{e.message}. Missing Tags Values are #{tags}"
     end

     def created_embedded_config
@@ -90,31 +97,39 @@ module Superset

     def duplicate_source_dashboard_datasets
       source_dashboard_datasets.each do |dataset|
-        # duplicate the dataset, renaming to use of suffix as the target_schema
-        # reason: there is a bug(or feature) in the SS
-
-
-        existing_datasets = Superset::Dataset::List.new(title_equals: new_dataset_name, schema_equals: target_schema).result
+        # duplicate the dataset, renaming to use of suffix as the target_schema OR target_dataset_suffix_override value
+        # reason: there is a bug(or feature) in the SS where a dataset name must be uniq when duplicating
+        target_dataset_name = new_dataset_name(dataset[:datasource_name])
+        existing_datasets = Superset::Dataset::List.new(title_equals: target_dataset_name, schema_equals: target_schema).result
         if existing_datasets.any?
-          logger.info "Dataset #{
-          new_dataset_id = existing_datasets[0]["id"]
+          logger.info " Dataset #{target_dataset_name} already exists. Reusing it"
+          new_dataset_id = existing_datasets[0]["id"]
         else
-          new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name:
-          # update the new dataset with the target schema
-          Superset::Dataset::UpdateSchema.new(
+          new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: target_dataset_name).perform
+          # update the new dataset with the target schema, database, catalog
+          Superset::Dataset::UpdateSchema.new(
+            source_dataset_id: new_dataset_id,
+            target_database_id: target_database_id,
+            target_schema: target_schema,
+            target_catalog: validated_target_database_catalog_name).perform
         end
         # keep track of the previous dataset and the matching new dataset_id
         dataset_duplication_tracker << { source_dataset_id: dataset[:id], new_dataset_id: new_dataset_id }
       end
     end

+    # if a suffix is provided, use it to suffix the dataset name
+    # if no suffix is provided, use the schema name as the suffix
+    def new_dataset_name(dataset_name)
+      return "#{dataset_name}-#{target_schema}" if target_dataset_suffix_override.blank?
+      "#{dataset_name}-#{target_dataset_suffix_override.downcase}"
+    end
+
     def update_charts_with_new_datasets
       logger.info "Updating Charts to point to New Datasets and updating Dashboard json_metadata ..."
       # note dashboard json_metadata currently still points to the old chart ids and is updated here
-
       new_dashboard_json_metadata_json_string = new_dashboard_json_metadata_configuration.to_json # need to convert to string for gsub
-
-      new_charts_list = Superset::Dashboard::Charts::List.new(new_dashboard.id).result
+
       new_chart_ids_list = new_charts_list&.map { |r| r['id'] }&.compact
       # get all chart details for the source dashboard
       original_charts = Superset::Dashboard::Charts::List.new(source_dashboard_id).result.map { |r| [r['slice_name'], r['id']] }.to_h
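A worked sketch of the new naming rule (values illustrative):

    # with target_schema = 'acme' and no override
    new_dataset_name('Sales Summary')   # => "Sales Summary-acme"
    # with target_dataset_suffix_override = 'ClientX'
    new_dataset_name('Sales Summary')   # => "Sales Summary-clientx" (the override is downcased)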
@@ -144,6 +159,11 @@ module Superset
       @new_dashboard_json_metadata_configuration = JSON.parse(new_dashboard_json_metadata_json_string)
     end

+    def new_charts_list
+      # get all chart ids for the new dashboard
+      @new_charts_list ||= Superset::Dashboard::Charts::List.new(new_dashboard.id).result
+    end
+
     def duplicate_source_dashboard_filters
       return unless source_dashboard_filter_dataset_ids.length.positive?

@@ -160,11 +180,11 @@ module Superset

     def update_source_dashboard_json_metadata
       logger.info " Updated new Dashboard json_metadata charts with new dataset ids"
-      Superset::Dashboard::Put.new(
+      Superset::Dashboard::Put.new(target_id: new_dashboard.id, params: { "json_metadata" => @new_dashboard_json_metadata_configuration.to_json }).perform
     end

     def publish_dashboard
-      Superset::Dashboard::Put.new(
+      Superset::Dashboard::Put.new(target_id: new_dashboard.id, params: { published: publish } ).perform
     end

     def new_dashboard
@@ -205,7 +225,7 @@ module Superset
       raise ValidationError, "One or more source dashboard filters point to a different schema than the dashboard charts. Identified Unpermitted Filter Dataset Ids are #{unpermitted_filter_dataset_ids.to_s}" if unpermitted_filter_dataset_ids.any?

       # new dataset validations - Need to be commented for EU dashboard duplication as we are using the existing datasets for the new dashboard
-      raise ValidationError, "DATASET NAME CONFLICT: The Target Schema #{target_schema} already has existing datasets named: #{target_schema_matching_dataset_names.join(',')}" unless target_schema_matching_dataset_names.empty?
+      raise ValidationError, "DATASET NAME CONFLICT: The Target Database #{target_database_id} with Schema #{target_schema} already has existing datasets named: #{target_schema_matching_dataset_names.join(',')}" unless target_schema_matching_dataset_names.empty?
       validate_source_dashboard_datasets_sql_does_not_hard_code_schema

       # embedded allowed_domain validations
@@ -214,8 +234,8 @@ module Superset

     def validate_source_dashboard_datasets_sql_does_not_hard_code_schema
       errors = source_dashboard_datasets.map do |dataset|
-        "The Dataset ID #{dataset[:id]} SQL query is hard coded with the schema value
-        "Remove all direct embedded schema calls from the Dataset SQL query before continuing." if dataset[:sql]
+        "The Dataset ID #{dataset[:id]} SQL query is hard coded with the schema value: #{dataset[:schema]}. This indicates that the dataset can not be duplicated cleanly to point to the target schema. " +
+        "Remove all direct embedded schema calls from the Dataset SQL query before continuing." if dataset[:sql]&.include?("#{dataset[:schema]}.")
       end.compact
       raise ValidationError, errors.join("\n") unless errors.empty?
     end
@@ -241,13 +261,15 @@ module Superset
       source_dashboard_datasets.map { |dataset| dataset[:datasource_name] }.uniq
     end

-    # identify any already existing datasets in the target schema that have the same name as the source dashboard datasets
+    # identify any already existing datasets in the target database and schema that have the same name as the source dashboard datasets + suffix
     # note this is prior to adding the (COPY) suffix
-    # here we will need to decide if we want to use the existing dataset or not
+    # here we will need to decide if we want to use the existing dataset or not
     # for now we will exit with an error if we find any existing datasets of the same name
     def target_schema_matching_dataset_names
       @target_schema_matching_dataset_names ||= source_dashboard_dataset_names.map do |source_dataset_name|
-
+        source_dataset_name_with_suffix = new_dataset_name(source_dataset_name)
+
+        existing_names = Superset::Dataset::List.new(title_contains: source_dataset_name_with_suffix, database_id_eq: target_database_id, schema_equals: target_schema).result.map{|t|t['table_name']}.uniq # contains match to cover with suffix as well
         unless existing_names.flatten.empty?
           logger.error " HALTING PROCESS: Schema #{target_schema} already has Dataset called #{existing_names}"
         end
@@ -263,6 +285,15 @@ module Superset
       @filter_dataset_ids ||= source_dashboard.filter_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact.uniq
     end

+    # if multiple catalogs are present, the target_catalog_name must be provided, otherwise use the first catalog
+    def validated_target_database_catalog_name
+      catalogs = Superset::Database::GetCatalogs.new(target_database_id).catalogs
+      return target_catalog_name if catalogs.include?(target_catalog_name)
+
+      raise ValidationError, "Target Database #{target_database_id} has multiple catalogs, must provide target_catalog_name" if catalogs.size > 1 && target_catalog_name.blank?
+      @validated_target_database_catalog_name ||= catalogs.find { |c| c['name'] == target_catalog_name } || catalogs.first
+    end
+
     # Primary Assumption is that all charts datasets on the source dashboard are pointing to the same database schema
     # An unpermitted filter will have a dataset that is pulling data from a datasource that is
     # different to the dashboard charts database schema
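The catalog validation leans on the new Superset::Database::GetCatalogs class; a sketch of the decision it encodes (database id and catalog names are placeholders):

    Superset::Database::GetCatalogs.new(6).catalogs   # => e.g. ["analytics", "raw"]
    # one catalog   -> used as the default target
    # many catalogs -> target_catalog_name must be supplied, else a ValidationError is raised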
@@ -279,6 +310,21 @@ module Superset
       end
     end

+    # remove the confirmed duplicated objects if the process fails
+    # do not use Dashboard::BulkDeleteCascade here as it may remove datasets from the source dashboard as well
+    def remove_duplicated_objects
+      logger.info "Removing duplicated objects ..."
+
+      new_dataset_ids = dataset_duplication_tracker&.map { |dataset| dataset[:new_dataset_id] }.compact
+      Superset::Dataset::BulkDelete.new(dataset_ids: new_dataset_ids).perform if new_dataset_ids.any?
+
+      new_chart_ids = new_charts_list&.map { |r| r['id'] }.compact
+      Superset::Chart::BulkDelete.new(chart_ids: new_chart_ids).perform if new_chart_ids.any?
+
+      Superset::Dashboard::Delete.new(dashboard_id: new_dashboard.id).perform if new_dashboard.id.present?
+      logger.info "Removed duplicated objects successfully."
+    end
+
     def logger
       @logger ||= Superset::Logger.new
     end
data/lib/superset/services/import_dashboard_across_environment.rb
CHANGED

@@ -56,6 +56,7 @@ module Superset

     def remove_source_database_config
       return if dashboard_config[:databases].blank?
+
       previous_database_name = dashboard_config[:databases]&.first[:content][:database_name]
       File.delete(dashboard_config[:databases].first[:filename])

@@ -78,6 +79,8 @@ module Superset
       dashboard_config[:datasets].each do |dataset|
         dataset[:content][:database_uuid] = dashboard_config[:databases].first[:content][:uuid]
         dataset[:content][:schema] = target_database_schema
+        dataset[:content][:catalog] = nil # reset target to use default catalog if applicable
+
         stringified_content = deep_transform_keys_to_strings(dataset[:content])
         File.open(dataset[:filename], 'w') { |f| f.write stringified_content.to_yaml }
       end
data/lib/superset/tag/add_to_object.rb
CHANGED

@@ -4,11 +4,11 @@ module Superset
   module Tag
     class AddToObject < Superset::Request

-      attr_reader :object_type_id, :
+      attr_reader :object_type_id, :target_id, :tags

-      def initialize(object_type_id:,
+      def initialize(object_type_id:, target_id:, tags: [])
         @object_type_id = object_type_id
-        @
+        @target_id = target_id
         @tags = tags
       end

@@ -31,7 +31,7 @@ module Superset
       def validate_constructor_args
         raise InvalidParameterError, "object_type_id integer is required" unless object_type_id.present? && object_type_id.is_a?(Integer)
         raise InvalidParameterError, "object_type_id is not a known value" unless ObjectType.list.include?(object_type_id)
-        raise InvalidParameterError, "
+        raise InvalidParameterError, "target_id integer is required" unless target_id.present? && target_id.is_a?(Integer)
         raise InvalidParameterError, "tags array is required" unless tags.present? && tags.is_a?(Array)
         raise InvalidParameterError, "tags array must contain string only values" unless tags.all? { |item| item.is_a?(String) }
       end
@@ -39,7 +39,7 @@ module Superset
       private

       def route
-        "tag/#{object_type_id}/#{
+        "tag/#{object_type_id}/#{target_id}/"
       end
     end
   end
 end
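Usage with the renamed target_id keyword (the dashboard id and tag values are placeholders):

    Superset::Tag::AddToObject.new(
      object_type_id: ObjectType::DASHBOARD,  # the gem's ObjectType enum, as referenced in the hunk above
      target_id: 101,                         # placeholder dashboard id
      tags: ['embedded', 'client-acme']
    ).perform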
data/lib/superset/tag/list.rb
CHANGED
@@ -23,7 +23,7 @@ module Superset
       # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
       filter_set = []
       filter_set << "(col:name,opr:ct,value:'#{name_contains}')" if name_contains.present?
-      filter_set << "(col:name,opr:eq,value
+      filter_set << "(col:name,opr:eq,value:'#{name_equals}')" if name_equals.present?
       unless filter_set.empty?
         "filters:!(" + filter_set.join(',') + "),"
       end
data/lib/superset/version.rb
CHANGED
data/superset.gemspec
CHANGED
@@ -11,7 +11,7 @@ Gem::Specification.new do |spec|
   spec.summary = "A Ruby Client for Apache Superset API"
   spec.homepage = "https://github.com/rdytech/superset-client"
   spec.license = "MIT"
-  spec.required_ruby_version = ">=
+  spec.required_ruby_version = ">= 3"

   #spec.metadata["allowed_push_host"] = ""

@@ -34,21 +34,20 @@ Gem::Specification.new do |spec|
     "lib"
   ]

-
-  spec.add_dependency "dotenv", "~> 2.7"
-  spec.add_dependency "json", "~> 2.6"
+  spec.add_dependency "json", ">= 2.0"
   spec.add_dependency "terminal-table", "~> 4.0"
-  spec.add_dependency "
-  spec.add_dependency "
-  spec.add_dependency "require_all", "~> 3.0"
-  spec.add_dependency "rubyzip", "~> 1.0"
+  spec.add_dependency "require_all", ">= 3.0"
+  spec.add_dependency "rubyzip", ">= 1.3"
   spec.add_dependency "faraday", "~> 1.0"
   spec.add_dependency "faraday-multipart", "~> 1.0"
-  spec.add_dependency "enumerate_it", "
-
-  spec.add_development_dependency "
-  spec.add_development_dependency "
-  spec.add_development_dependency "
+  spec.add_dependency "enumerate_it", ">= 1.7"
+
+  spec.add_development_dependency "dotenv", ">= 2.0"
+  spec.add_development_dependency "rake", ">= 13.0"
+  spec.add_development_dependency "rspec", ">= 3.0"
+  spec.add_development_dependency "rubocop", ">= 1.0"
+  spec.add_development_dependency "pry", ">= 0.14"
+  spec.add_development_dependency "rollbar", ">= 3.0"

   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html