superset 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.buildkite/pipeline.yml +16 -0
- data/.rspec +3 -0
- data/.rubocop.yml +13 -0
- data/CHANGELOG.md +48 -0
- data/Dockerfile +17 -0
- data/LICENSE +21 -0
- data/README.md +205 -0
- data/Rakefile +12 -0
- data/doc/duplicate_dashboards.md +214 -0
- data/doc/setting_up_personal_api_credentials.md +127 -0
- data/docker-compose.override.yml +10 -0
- data/docker-compose.yml +8 -0
- data/env.sample +9 -0
- data/lib/loggers/duplicate_dashboard_logger.rb +15 -0
- data/lib/superset/authenticator.rb +55 -0
- data/lib/superset/chart/bulk_delete.rb +40 -0
- data/lib/superset/chart/delete.rb +30 -0
- data/lib/superset/chart/get.rb +56 -0
- data/lib/superset/chart/list.rb +59 -0
- data/lib/superset/chart/update_dataset.rb +90 -0
- data/lib/superset/client.rb +53 -0
- data/lib/superset/credential/api_user.rb +25 -0
- data/lib/superset/credential/embedded_user.rb +25 -0
- data/lib/superset/dashboard/bulk_delete.rb +42 -0
- data/lib/superset/dashboard/bulk_delete_cascade.rb +52 -0
- data/lib/superset/dashboard/charts/list.rb +47 -0
- data/lib/superset/dashboard/compare.rb +94 -0
- data/lib/superset/dashboard/copy.rb +78 -0
- data/lib/superset/dashboard/datasets/list.rb +74 -0
- data/lib/superset/dashboard/delete.rb +42 -0
- data/lib/superset/dashboard/embedded/get.rb +56 -0
- data/lib/superset/dashboard/embedded/put.rb +35 -0
- data/lib/superset/dashboard/export.rb +98 -0
- data/lib/superset/dashboard/get.rb +51 -0
- data/lib/superset/dashboard/info.rb +17 -0
- data/lib/superset/dashboard/list.rb +99 -0
- data/lib/superset/dashboard/put.rb +37 -0
- data/lib/superset/dashboard/warm_up_cache.rb +42 -0
- data/lib/superset/database/get.rb +30 -0
- data/lib/superset/database/get_schemas.rb +25 -0
- data/lib/superset/database/list.rb +51 -0
- data/lib/superset/dataset/bulk_delete.rb +41 -0
- data/lib/superset/dataset/create.rb +62 -0
- data/lib/superset/dataset/delete.rb +30 -0
- data/lib/superset/dataset/duplicate.rb +62 -0
- data/lib/superset/dataset/get.rb +56 -0
- data/lib/superset/dataset/list.rb +41 -0
- data/lib/superset/dataset/update_query.rb +56 -0
- data/lib/superset/dataset/update_schema.rb +120 -0
- data/lib/superset/dataset/warm_up_cache.rb +41 -0
- data/lib/superset/display.rb +42 -0
- data/lib/superset/enumerations/object_type.rb +11 -0
- data/lib/superset/file_utilities.rb +19 -0
- data/lib/superset/guest_token.rb +69 -0
- data/lib/superset/logger.rb +20 -0
- data/lib/superset/request.rb +62 -0
- data/lib/superset/route_info.rb +34 -0
- data/lib/superset/security/permissions_resources/list.rb +22 -0
- data/lib/superset/security/role/create.rb +25 -0
- data/lib/superset/security/role/get.rb +32 -0
- data/lib/superset/security/role/list.rb +45 -0
- data/lib/superset/security/role/permission/create.rb +35 -0
- data/lib/superset/security/role/permission/get.rb +37 -0
- data/lib/superset/security/user/create.rb +49 -0
- data/lib/superset/security/user/get.rb +27 -0
- data/lib/superset/security/user/list.rb +42 -0
- data/lib/superset/services/duplicate_dashboard.rb +298 -0
- data/lib/superset/sqllab/execute.rb +52 -0
- data/lib/superset/tag/add_to_object.rb +46 -0
- data/lib/superset/tag/get.rb +30 -0
- data/lib/superset/tag/list.rb +37 -0
- data/lib/superset/version.rb +5 -0
- data/lib/superset.rb +17 -0
- data/log/README.md +4 -0
- data/superset.gemspec +55 -0
- metadata +300 -0
data/lib/superset/security/user/get.rb
ADDED
@@ -0,0 +1,27 @@
+module Superset
+  module Security
+    module User
+      class Get < Superset::Request
+        attr_reader :id
+
+        def initialize(id)
+          @id = id
+        end
+
+        def result
+          [ super ]
+        end
+
+        private
+
+        def list_attributes
+          [:id, :first_name, :last_name, :email, :login_count, :last_login]
+        end
+
+        def route
+          "security/users/#{id}"
+        end
+      end
+    end
+  end
+end
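A minimal usage sketch for the class above; the user id is illustrative, and it assumes API credentials are configured as per doc/setting_up_personal_api_credentials.md (the `list` display helper comes from Superset::Request, as used by the other Get classes in this diff):

```ruby
# Fetch a single Superset user by id.
user = Superset::Security::User::Get.new(42)
user.list           # tabular display of the list_attributes fields
user.result.first   # the raw user attributes returned by the API
```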
data/lib/superset/security/user/list.rb
ADDED
@@ -0,0 +1,42 @@
+module Superset
+  module Security
+    module User
+      class List < Superset::Request
+        attr_reader :email_contains, :username_equals
+
+        def initialize(page_num: 0, email_contains: '', username_equals: '')
+          @email_contains = email_contains
+          @username_equals = username_equals
+          super(page_num: page_num)
+        end
+
+        private
+
+        def route
+          "security/users/?q=(#{query_params})"
+        end
+
+        def filters
+          raise 'ERROR: only one filter supported currently' if email_contains.present? && username_equals.present?
+
+          if email_contains.present?
+            "filters:!((col:email,opr:ct,value:#{email_contains})),"
+          elsif username_equals.present?
+            "filters:!((col:username,opr:eq,value:#{username_equals})),"
+          else
+            ''
+          end
+        end
+
+        def title
+          "#{response[:count]} Matching Users for Host: #{superset_host}\n" \
+          "#{result.count} Users listed with: #{query_params}"
+        end
+
+        def list_attributes
+          [:id, :first_name, :last_name, :email, :active, :login_count, :last_login]
+        end
+      end
+    end
+  end
+end
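A usage sketch; note the guard in #filters means email_contains and username_equals cannot be combined in one call (values below are illustrative):

```ruby
# List users whose email contains a substring.
Superset::Security::User::List.new(email_contains: 'acme.com').list

# Or match an exact username instead, one filter at a time.
Superset::Security::User::List.new(username_equals: 'jbat').list
```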
data/lib/superset/services/duplicate_dashboard.rb
ADDED
@@ -0,0 +1,298 @@
+# Data Sovereignty validations are enforced, i.e. confirming
+# - all charts datasets on the source dashboard are pointing to the same database schema
+# - all filter datasets on the source dashboard are pointing to the same database schema as the charts
+
+# - The source dashboard is in the same Superset instance as the target database and target schema
+
+
+module Superset
+  module Services
+    class DuplicateDashboard < Superset::Request
+
+      attr_reader :source_dashboard_id, :target_schema, :target_database_id, :allowed_domains, :tags, :publish
+
+      def initialize(source_dashboard_id:, target_schema:, target_database_id:, allowed_domains: [], tags: [], publish: false)
+        @source_dashboard_id = source_dashboard_id
+        @target_schema = target_schema
+        @target_database_id = target_database_id
+        @allowed_domains = allowed_domains
+        @tags = tags
+        @publish = publish
+      end
+
+      def perform
+        # validate all params before starting the process
+        validate_params
+
+        # Pull the Datasets for all charts on the source dashboard
+        source_dashboard_datasets
+
+        # create a new_dashboard by copying the source_dashboard with 'duplicate_slices: true' to get a new set of charts.
+        # The new_dashboard will have a copy of charts from the source_dashboard, but with the same datasets as the source_dashboard
+        new_dashboard
+
+        # Duplicate these Datasets to the new target schema and target database
+        duplicate_source_dashboard_datasets
+
+        # Update the Charts on the New Dashboard with the New Datasets and update the Dashboard json_metadata for the charts
+        update_charts_with_new_datasets
+
+        # Duplicate filters to the new target schema and target database
+        duplicate_source_dashboard_filters
+
+        update_source_dashboard_json_metadata
+
+        created_embedded_config
+
+        add_tags_to_new_dashboard
+
+        publish_dashboard if publish
+
+        end_log_message
+
+        # return the new dashboard id and url
+        { new_dashboard_id: new_dashboard.id, new_dashboard_url: new_dashboard.url, published: publish }
+
+      rescue => e
+        logger.error("#{e.message}")
+        raise e
+      end
+
+      def new_dashboard_json_metadata_configuration
+        @new_dashboard_json_metadata_configuration ||= new_dashboard.json_metadata
+      end
+
+      private
+
+      def add_tags_to_new_dashboard
+        return unless tags.present?
+
+        Superset::Tag::AddToObject.new(object_type_id: ObjectType::DASHBOARD, object_id: new_dashboard.id, tags: tags).perform
+        logger.info " Added tags to dashboard #{new_dashboard.id}: #{tags}"
+      rescue => e
+        # catch the tag error and display it in the log, but allow the process to finish, as a tag error is fairly insignificant
+        logger.error(" FAILED to add tags to new dashboard id: #{new_dashboard.id}. Error is #{e.message}")
+        logger.error(" Missing Tags Values are #{tags}")
+      end
+
+      def created_embedded_config
+        return unless allowed_domains.present?
+
+        result = Dashboard::Embedded::Put.new(dashboard_id: new_dashboard.id, allowed_domains: allowed_domains).result
+        logger.info "Added Embedded Settings to New Dashboard #{new_dashboard.id}:"
+        logger.info " Embedded Domain allowed_domains: #{result['allowed_domains']}"
+        logger.info " Embedded UUID: #{result['uuid']}"
+      end
+
+      def dataset_duplication_tracker
+        @dataset_duplication_tracker ||= []
+      end
+
+      def duplicate_source_dashboard_datasets
+        source_dashboard_datasets.each do |dataset|
+          # duplicate the dataset, renaming it to use the target_schema as a suffix
+          # reason: there is a bug (or feature) in the SS API where a dataset name must be unique when duplicating.
+          # (note however renaming in the GUI to a dup name works fine)
+          new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: "#{dataset[:datasource_name]}-#{target_schema}").perform
+
+          # keep track of the previous dataset and the matching new dataset_id
+          dataset_duplication_tracker << { source_dataset_id: dataset[:id], new_dataset_id: new_dataset_id }
+
+          # update the new dataset with the target schema and target database
+          Superset::Dataset::UpdateSchema.new(source_dataset_id: new_dataset_id, target_database_id: target_database_id, target_schema: target_schema).perform
+        end
+      end
+
+      def update_charts_with_new_datasets
+        logger.info "Updating Charts to point to New Datasets and updating Dashboard json_metadata ..."
+        # note dashboard json_metadata currently still points to the old chart ids and is updated here
+
+        new_dashboard_json_metadata_json_string = new_dashboard_json_metadata_configuration.to_json # need to convert to string for gsub
+        # get all chart ids for the new dashboard
+        new_charts_list = Superset::Dashboard::Charts::List.new(new_dashboard.id).result
+        new_chart_ids_list = new_charts_list&.map { |r| r['id'] }&.compact
+        # get all chart details for the source dashboard
+        original_charts = Superset::Dashboard::Charts::List.new(source_dashboard_id).result.map { |r| [r['slice_name'], r['id']] }.to_h
+        new_charts = new_charts_list.map { |r| [r['id'], r['slice_name']] }.to_h
+        return unless new_chart_ids_list.any?
+
+        # for each chart, update the chart's current dataset_id with the new dataset_id
+        new_chart_ids_list.each do |new_chart_id|
+
+          # get the CURRENT dataset_id for the new chart
+          current_chart_dataset_id = Superset::Chart::Get.new(new_chart_id).datasource_id
+
+          # find the new dataset_id for the new chart, based on the current_chart_dataset_id
+          new_dataset_id = dataset_duplication_tracker.find { |dataset| dataset[:source_dataset_id] == current_chart_dataset_id }&.fetch(:new_dataset_id, nil)
+
+          # update the new chart to target the new dataset_id and to reference the new target_dashboard_id
+          Superset::Chart::UpdateDataset.new(chart_id: new_chart_id, target_dataset_id: new_dataset_id, target_dashboard_id: new_dashboard.id).perform
+          logger.info " Update Chart #{new_chart_id} to new dataset_id #{new_dataset_id}"
+
+          # update json metadata, swapping the old chart_id with the new chart_id
+          original_chart_id = original_charts[new_charts[new_chart_id]]
+          regex_with_numeric_boundaries = Regexp.new("\\b#{original_chart_id.to_s}\\b")
+          new_dashboard_json_metadata_json_string.gsub!(regex_with_numeric_boundaries, new_chart_id.to_s)
+        end
+
+        # convert back to hash .. and store in the new_dashboard_json_metadata_configuration
+        @new_dashboard_json_metadata_configuration = JSON.parse(new_dashboard_json_metadata_json_string)
+      end
+
+      def duplicate_source_dashboard_filters
+        return unless source_dashboard_filter_dataset_ids.length.positive?
+
+        logger.info "Updating Filters to point to new dataset targets ..."
+        configuration = new_dashboard_json_metadata_configuration['native_filter_configuration']&.map do |filter_config|
+          targets = filter_config['targets']
+          target_filter_dataset_id = dataset_duplication_tracker.find { |d| d[:source_dataset_id] == targets.first["datasetId"] }&.fetch(:new_dataset_id, nil)
+          filter_config['targets'] = [targets.first.merge({ "datasetId" => target_filter_dataset_id })]
+          filter_config
+        end
+
+        @new_dashboard_json_metadata_configuration['native_filter_configuration'] = configuration || []
+      end
+
+      def update_source_dashboard_json_metadata
+        logger.info " Updated new Dashboard json_metadata charts with new dataset ids"
+        Superset::Dashboard::Put.new(target_dashboard_id: new_dashboard.id, params: { "json_metadata" => @new_dashboard_json_metadata_configuration.to_json }).perform
+      end
+
+      def publish_dashboard
+        Superset::Dashboard::Put.new(target_dashboard_id: new_dashboard.id, params: { published: publish }).perform
+      end
+
+      def new_dashboard
+        @new_dashboard ||= begin
+          copy = Superset::Dashboard::Copy.new(
+            source_dashboard_id: source_dashboard_id,
+            duplicate_slices: true,
+            clear_shared_label_colors: true
+          ).perform
+          logger.info(" Copy Dashboard/Charts Completed - New Dashboard ID: #{copy.id}")
+          copy
+        end
+      rescue => e
+        logger.info(" Dashboard::Copy error: #{e.message}")
+        raise "Dashboard::Copy error: #{e.message}"
+      end
+
+      # retrieve the datasets that will be duplicated
+      def source_dashboard_datasets
+        @source_dashboard_datasets ||= Superset::Dashboard::Datasets::List.new(source_dashboard_id).datasets_details
+      rescue => e
+        raise "Unable to retrieve datasets for source dashboard #{source_dashboard_id}: #{e.message}"
+      end
+
+      def validate_params
+        start_log_msg
+        # params validations
+        raise InvalidParameterError, "source_dashboard_id integer is required" unless source_dashboard_id.present? && source_dashboard_id.is_a?(Integer)
+        raise InvalidParameterError, "target_schema string is required" unless target_schema.present? && target_schema.is_a?(String)
+        raise InvalidParameterError, "target_database_id integer is required" unless target_database_id.present? && target_database_id.is_a?(Integer)
+
+        # dashboard validations
+        # Validation of source dashboard existence will occur inside the new_dashboard call
+
+        # schema validations
+        raise ValidationError, "Schema #{target_schema} does not exist in target database: #{target_database_id}" unless target_database_available_schemas.include?(target_schema)
+        raise ValidationError, "The source dashboard datasets are required to point to one schema only. Actual schema list is #{source_dashboard_schemas.join(',')}" if source_dashboard_has_more_than_one_schema?
+        raise ValidationError, "One or more source dashboard filters point to a different schema than the dashboard charts. Identified Unpermitted Filter Dataset Ids are #{unpermitted_filter_dataset_ids.to_s}" if unpermitted_filter_dataset_ids.any?
+
+        # new dataset validations
+        raise ValidationError, "DATASET NAME CONFLICT: The Target Schema #{target_schema} already has existing datasets named: #{target_schema_matching_dataset_names.join(',')}" unless target_schema_matching_dataset_names.empty?
+        validate_source_dashboard_datasets_sql_does_not_hard_code_schema
+
+        # embedded allowed_domain validations
+        raise InvalidParameterError, 'allowed_domains array is required' if allowed_domains.nil? || allowed_domains.class != Array
+      end
+
+      def validate_source_dashboard_datasets_sql_does_not_hard_code_schema
+        errors = source_dashboard_datasets.map do |dataset|
+          "The Dataset ID #{dataset[:id]} SQL query is hard coded with the schema value and can not be duplicated cleanly. " +
+            "Remove all direct embedded schema calls from the Dataset SQL query before continuing." if dataset[:sql].include?("#{dataset[:schema]}.")
+        end.compact
+        raise ValidationError, errors.join("\n") unless errors.empty?
+      end
+
+      def source_dashboard
+        @source_dashboard ||= Superset::Dashboard::Get.new(source_dashboard_id)
+      end
+
+      def target_database_available_schemas
+        Superset::Database::GetSchemas.call(target_database_id)
+      end
+
+      def source_dashboard_has_more_than_one_schema?
+        source_dashboard_schemas.count > 1
+      end
+
+      # Data Sovereignty rules expect only 1 value here, and raise a validation error if there is > 1
+      def source_dashboard_schemas
+        source_dashboard_datasets.map { |dataset| dataset[:schema] }.uniq
+      end
+
+      def source_dashboard_dataset_names
+        source_dashboard_datasets.map { |dataset| dataset[:datasource_name] }.uniq
+      end
+
+      # identify any already existing datasets in the target schema that have the same name as the source dashboard datasets
+      # note this is prior to adding the (COPY) suffix
+      # here we will need to decide if we want to use the existing dataset or not, see NEP-????
+      # for now we will exit with an error if we find any existing datasets of the same name
+      def target_schema_matching_dataset_names
+        source_dashboard_dataset_names.map do |source_dataset_name|
+          existing_names = Superset::Dataset::List.new(title_contains: source_dataset_name, schema_equals: target_schema).result.map { |t| t['table_name'] }.uniq # contains match to cover the suffix as well
+          unless existing_names.flatten.empty?
+            logger.error " HALTING PROCESS: Schema #{target_schema} already has Dataset called #{existing_names}"
+          end
+          existing_names
+        end.flatten.compact
+      end
+
+      def source_dashboard_dataset_ids
+        source_dashboard_datasets.map { |d| d['id'] }
+      end
+
+      def source_dashboard_filter_dataset_ids
+        filters_configuration = JSON.parse(source_dashboard.result['json_metadata'])['native_filter_configuration'] || []
+        return Array.new unless filters_configuration && filters_configuration.any?
+
+        # pull only the filters dataset ids from the dashboard
+        filters_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact
+      end
+
+      # Primary Assumption is that all charts datasets on the source dashboard are pointing to the same database schema
+      # An unpermitted filter will have a dataset that is pulling data from a datasource that is
+      # different to the dashboard charts database schema
+      def unpermitted_filter_dataset_ids
+        @unpermitted_filter_dataset_ids ||= begin
+          filter_datasets_not_used_in_charts = source_dashboard_filter_dataset_ids - source_dashboard_dataset_ids
+
+          # retrieve any filter_datasets_not_used_in_charts that do not match the source_dashboard_schema
+          filter_datasets_not_used_in_charts.map do |filter_dataset|
+            filter_dataset_schema = Superset::Dataset::Get.new(filter_dataset).schema
+            # return any filter datasets not used in charts that are from a different schema
+            { filter_dataset_id: filter_dataset, filter_schema: filter_dataset_schema } if [filter_dataset_schema] != source_dashboard_schemas
+          end.compact
+        end
+      end
+
+      def logger
+        @logger ||= Superset::Logger.new
+      end
+
+      def start_log_msg
+        logger.info ""
+        logger.info ">>>>>>>>>>>>>>>>> Starting DuplicateDashboard Service <<<<<<<<<<<<<<<<<<<<<<"
+        logger.info "Source Dashboard URL: #{source_dashboard.url}"
+        logger.info "Duplicating dashboard #{source_dashboard_id} into Target Schema: #{target_schema} in database #{target_database_id}"
+      end
+
+      def end_log_message
+        logger.info "Duplication Successful. New Dashboard URL: #{new_dashboard.url} "
+        logger.info ">>>>>>>>>>>>>>>>> Finished DuplicateDashboard Service <<<<<<<<<<<<<<<<<<<<<<"
+      end
+    end
+  end
+end
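A sketch of how the service is invoked, matching the keyword arguments and return hash above; all ids, names, tags, and domains are illustrative:

```ruby
result = Superset::Services::DuplicateDashboard.new(
  source_dashboard_id: 101,                          # dashboard to copy
  target_schema:       'acme',                       # schema the duplicated datasets will point to
  target_database_id:  3,                            # database containing that schema
  allowed_domains:     ['https://app.example.com'],  # optional: embedded dashboard config
  tags:                ['customer:acme'],            # optional: tags applied to the copy
  publish:             true                          # optional: publish on success
).perform
result # => { new_dashboard_id: ..., new_dashboard_url: ..., published: true }
```

Validation failures raise InvalidParameterError or ValidationError before any copy is made, so a failed run leaves no partially configured dashboard from this step.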
data/lib/superset/sqllab/execute.rb
ADDED
@@ -0,0 +1,52 @@
+module Superset
+  module Sqllab
+    class Execute < Superset::Request
+      class InvalidParameterError < StandardError; end
+
+      attr_reader :database_id, :query, :schema, :query_limit
+
+      def initialize(database_id:, query:, schema: 'public', query_limit: 1000)
+        @database_id = database_id
+        @query = query
+        @schema = schema
+        @query_limit = query_limit
+      end
+
+      def perform
+        validate_constructor_args
+        response
+        data
+      end
+
+      def response
+        @response ||= client.post(route, query_params)
+      end
+
+      def data
+        response["data"]
+      end
+
+      private
+
+      def route
+        "sqllab/execute/"
+      end
+
+      def query_params
+        {
+          database_id: database_id,
+          sql: query,
+          schema: schema,
+          queryLimit: query_limit,
+          runAsync: false,
+        }
+      end
+
+      def validate_constructor_args
+        raise InvalidParameterError, "database_id integer is required" unless database_id.present? && database_id.is_a?(Integer)
+        raise InvalidParameterError, "query string is required" unless query.present? && query.is_a?(String)
+        raise InvalidParameterError, "schema must be a String type" unless schema.is_a?(String)
+      end
+    end
+  end
+end
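A sketch of running an ad-hoc synchronous query through the SQL Lab endpoint (runAsync is hard-coded to false above); the database id, schema, and SQL are illustrative:

```ruby
rows = Superset::Sqllab::Execute.new(
  database_id: 1,
  query:       'SELECT id, name FROM customers LIMIT 5',
  schema:      'public'
).perform
rows  # => the "data" array of row hashes from the API response
```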
data/lib/superset/tag/add_to_object.rb
ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Superset
+  module Tag
+    class AddToObject < Superset::Request
+
+      attr_reader :object_type_id, :object_id, :tags
+
+      def initialize(object_type_id:, object_id:, tags: [])
+        @object_type_id = object_type_id
+        @object_id = object_id
+        @tags = tags
+      end
+
+      def perform
+        validate_constructor_args
+
+        response # NOTE API response for success is {} .. not particularly informative
+      end
+
+      def response
+        @response ||= client.post(route, params)
+      end
+
+      def params
+        {
+          "properties": { "tags": tags }
+        }
+      end
+
+      def validate_constructor_args
+        raise InvalidParameterError, "object_type_id integer is required" unless object_type_id.present? && object_type_id.is_a?(Integer)
+        raise InvalidParameterError, "object_type_id is not a known value" unless ObjectType.list.include?(object_type_id)
+        raise InvalidParameterError, "object_id integer is required" unless object_id.present? && object_id.is_a?(Integer)
+        raise InvalidParameterError, "tags array is required" unless tags.present? && tags.is_a?(Array)
+        raise InvalidParameterError, "tags array must contain string only values" unless tags.all? { |item| item.is_a?(String) }
+      end
+
+      private
+
+      def route
+        "tag/#{object_type_id}/#{object_id}/"
+      end
+    end
+  end
+end
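A sketch of tagging a dashboard with this class. It assumes the ObjectType enumeration (added in data/lib/superset/enumerations/object_type.rb) exposes a DASHBOARD value, as referenced by the DuplicateDashboard service above; the object id and tag strings are illustrative:

```ruby
Superset::Tag::AddToObject.new(
  object_type_id: ObjectType::DASHBOARD,   # resolved within the Superset namespace
  object_id:      101,                     # illustrative dashboard id
  tags:           ['embedded', 'customer:acme']
).perform   # a successful call returns {} from the API
```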
data/lib/superset/tag/get.rb
ADDED
@@ -0,0 +1,30 @@
+module Superset
+  module Tag
+    class Get < Superset::Request
+
+      attr_reader :id
+
+      def initialize(id)
+        @id = id
+      end
+
+      def self.call(id)
+        self.new(id).list
+      end
+
+      def result
+        [ super ]
+      end
+
+      private
+
+      def list_attributes
+        ['id', 'name', 'type', 'description']
+      end
+
+      def route
+        "tag/#{id}"
+      end
+    end
+  end
+end
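Given the self.call shortcut above, fetching a tag is a one-liner (the tag id is illustrative):

```ruby
Superset::Tag::Get.call(7)   # tabular id, name, type, description for tag 7
```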
data/lib/superset/tag/list.rb
ADDED
@@ -0,0 +1,37 @@
+module Superset
+  module Tag
+    class List < Superset::Request
+      attr_reader :name_contains, :name_equals
+
+      def initialize(page_num: 0, name_contains: '', name_equals: '')
+        @name_contains = name_contains
+        @name_equals = name_equals
+        super(page_num: page_num)
+      end
+
+      def self.call
+        self.new.list
+      end
+
+      private
+
+      def route
+        "tag/?q=(#{query_params})"
+      end
+
+      def filters
+        # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
+        filter_set = []
+        filter_set << "(col:name,opr:ct,value:'#{name_contains}')" if name_contains.present?
+        filter_set << "(col:name,opr:eq,value:#{name_equals})" if name_equals.present?
+        unless filter_set.empty?
+          "filters:!(" + filter_set.join(',') + "),"
+        end
+      end
+
+      def list_attributes
+        ['id', 'name', 'description']
+      end
+    end
+  end
+end
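A filtering sketch; unlike the user list, both name filters can be combined here since #filters joins the filter_set (the tag name is illustrative):

```ruby
Superset::Tag::List.new(name_contains: 'customer').list   # substring match
Superset::Tag::List.call                                   # unfiltered first page
```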
data/lib/superset.rb
ADDED
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'require_all'
+
+require_rel "superset/credential"
+require_relative "superset/authenticator"
+require_relative "superset/client"
+require_relative "superset/display"
+require_relative "superset/logger"
+require_relative "superset/request"
+
+require_rel "superset"
+
+module Superset
+  class Error < StandardError; end
+  # Your code goes here...
+end
data/log/README.md
ADDED
data/superset.gemspec
ADDED
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require_relative "lib/superset/version"
+
+Gem::Specification.new do |spec|
+  spec.name = "superset"
+  spec.version = Superset::VERSION
+  spec.authors = ["jbat"]
+  spec.email = ["jonathon.batson@gmail.com"]
+
+  spec.summary = "A Ruby Client for Apache Superset API"
+  spec.homepage = "https://github.com/rdytech/superset-client"
+  spec.license = "MIT"
+  spec.required_ruby_version = ">= 2.6.0"
+
+  #spec.metadata["allowed_push_host"] = ""
+
+  #spec.metadata["homepage_uri"] = spec.homepage
+  #spec.metadata["source_code_uri"] = ""
+  #spec.metadata["changelog_uri"] = ""
+
+  # Specify which files should be added to the gem when it is released.
+  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
+  spec.files = Dir.chdir(__dir__) do
+    `git ls-files -z`.split("\x0").reject do |f|
+      (File.expand_path(f) == __FILE__) ||
+        f.start_with?(*%w[bin/ test/ spec/ features/ .git appveyor Gemfile])
+    end
+  end
+
+  #spec.bindir = "exe"
+  #spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+  spec.require_paths = [
+    "lib"
+  ]
+
+  # Uncomment to register a new dependency of your gem
+  spec.add_dependency "dotenv", "~> 2.7"
+  spec.add_dependency "json", "~> 2.6"
+  spec.add_dependency "terminal-table", "~> 1.8"
+  spec.add_dependency "rake", "~> 13.0"
+  spec.add_dependency "rollbar", "~> 3.4"
+  spec.add_dependency "require_all", "~> 3.0"
+  spec.add_dependency "rubyzip", "~> 1.0"
+  spec.add_dependency "faraday", "~> 1.0"
+  spec.add_dependency "faraday-multipart", "~> 1.0"
+  spec.add_dependency "enumerate_it", "~> 1.7.0"
+
+  spec.add_development_dependency "rspec", "~> 3.0"
+  spec.add_development_dependency "rubocop", "~> 1.5"
+  spec.add_development_dependency "pry", "~> 0.14"
+
+  # For more information and examples about making a new gem, check out our
+  # guide at: https://bundler.io/guides/creating_gem.html
+end
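Given the gemspec above, a consuming application would pull in this release through its Gemfile in the usual way (the version pin below is illustrative):

```ruby
# Gemfile
gem 'superset', '~> 0.1.6'
```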