superset 0.1.6 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +33 -0
- data/README.md +36 -144
- data/doc/duplicate_dashboards.md +2 -5
- data/doc/migrating_dashboards_across_environments.md +173 -0
- data/doc/publishing.md +39 -0
- data/doc/setting_up_personal_api_credentials.md +43 -7
- data/doc/usage.md +105 -0
- data/env.sample +1 -1
- data/lib/superset/base_put_request.rb +30 -0
- data/lib/superset/chart/create.rb +40 -0
- data/lib/superset/chart/duplicate.rb +75 -0
- data/lib/superset/chart/put.rb +18 -0
- data/lib/superset/chart/update_dataset.rb +1 -1
- data/lib/superset/client.rb +7 -1
- data/lib/superset/dashboard/bulk_delete_cascade.rb +1 -1
- data/lib/superset/dashboard/compare.rb +2 -2
- data/lib/superset/dashboard/datasets/list.rb +37 -9
- data/lib/superset/dashboard/embedded/get.rb +2 -2
- data/lib/superset/dashboard/export.rb +56 -5
- data/lib/superset/dashboard/get.rb +5 -0
- data/lib/superset/dashboard/import.rb +84 -0
- data/lib/superset/dashboard/list.rb +8 -4
- data/lib/superset/dashboard/warm_up_cache.rb +1 -1
- data/lib/superset/database/export.rb +119 -0
- data/lib/superset/database/list.rb +5 -2
- data/lib/superset/dataset/get.rb +10 -11
- data/lib/superset/dataset/list.rb +1 -1
- data/lib/superset/dataset/put.rb +18 -0
- data/lib/superset/dataset/update_schema.rb +4 -3
- data/lib/superset/file_utilities.rb +4 -3
- data/lib/superset/guest_token.rb +14 -7
- data/lib/superset/logger.rb +2 -2
- data/lib/superset/request.rb +7 -4
- data/lib/superset/services/dashboard_loader.rb +69 -0
- data/lib/superset/services/duplicate_dashboard.rb +14 -13
- data/lib/superset/services/import_dashboard_across_environment.rb +144 -0
- data/lib/superset/version.rb +1 -1
- metadata +15 -3
data/doc/usage.md
ADDED
@@ -0,0 +1,105 @@
+# Usage
+
+## API call examples
+
+Quickstart examples
+
+```ruby
+Superset::Database::List.call
+Superset::Database::GetSchemas.new(1).list # get schemas for database 1
+
+Superset::Dashboard::List.call
+Superset::Dashboard::List.new(title_contains: 'Sales').list
+Superset::Dashboard::Datasets::List.new(dashboard_id: 10).list # get all datasets for dashboard 10
+
+Superset::Dashboard::Datasets::List.new(dashboard_id: 10, include_filter_datasets: true).list # get all datasets for dashboard 10 including the ones used only in dashboard filters
+Superset::Dashboard::WarmUpCache.new(dashboard_id: 10).perform
+
+Superset::Dataset::List.call
+
+Superset::Chart::List.call
+
+Superset::Dashboard::BulkDelete.new(dashboard_ids: [1,2,3]).perform # Deletes Dashboards only (leaves all charts, datasets in place)
+Superset::Dashboard::BulkDeleteCascade.new(dashboard_ids: [1,2,3]).perform # Deletes Dashboards and related charts and datasets. (WARNING: no checks are performed)
+
+Superset::Sqllab::Execute.new(database_id: 1, schema: 'public', query: 'select count(*) from birth_names').perform
+
+Superset::Dashboard::Export.new(dashboard_id: 1, destination_path: '/tmp').perform
+
+Superset::RouteInfo.new(route: 'dashboard/_info').perform # Get info on an API endpoint .. handy for getting available filters
+Superset::RouteInfo.new(route: 'chart/_info').filters # OR just get the filters for an endpoint
+
+superset_class_list # helper method to list all classes under Superset::
+
+```
+
+## Detailed API Examples
+
+Generally classes follow the convention/path of the Superset API structure as per the swagger docs.
+
+ref https://superset.apache.org/docs/api/
+
+Limited support for filters is available on some list pages, primarily through the param `title_contains`.
+Pagination is supported via the `page_num` param.
+
+Primary methods across the majority of API calls are
+- response : the full API response
+- result : just the result attribute array
+- list : displays a formatted output to console, handy for quick investigation of objects
+- call : a class method shorthand to list on Get and List requests
+
+```ruby
+# List all Databases
+Superset::Database::List.call
+# DEBUG -- : Happi: GET https://your-superset-host/api/v1/database/?q=(page:0,page_size:100), {}
++----+------------------------------------+------------+------------------+
+|                      Superset::Database::List                           |
++----+------------------------------------+------------+------------------+
+| Id | Database name                      | Backend    | Expose in sqllab |
++----+------------------------------------+------------+------------------+
+| 1  | examples                           | postgresql | true             |
++----+------------------------------------+------------+------------------+
+
+# List database schemas for Database 1
+Superset::Database::GetSchemas.new(1).list
+# DEBUG -- : Happi: GET https://your-superset-host/api/v1/database/1/schemas/, {}
+=> ["information_schema", "public"]
+
+# List dashboards
+Superset::Dashboard::List.call
+# PAGE_SIZE is set to 100, so get the second set of 100 dashboards with
+Superset::Dashboard::List.new(page_num: 1).list
+# OR filter by title
+Superset::Dashboard::List.new(title_contains: 'Sales').list
+# DEBUG -- : Happi: GET https://your-superset-host/api/v1/dashboard/?q=(filters:!((col:dashboard_title,opr:ct,value:'Sales')),page:0,page_size:100), {}
+
++-----+------------------------------+-----------+--------------------------------------------------------------------+
+|                                       Superset::Dashboard::List                                                     |
++-----+------------------------------+-----------+--------------------------------------------------------------------+
+| Id  | Dashboard title              | Status    | Url                                                                |
++-----+------------------------------+-----------+--------------------------------------------------------------------+
+| 6   | Video Game Sales             | published | https://your-superset-host/superset/dashboard/6/                   |
+| 8   | Sales Dashboard              | published | https://your-superset-host/superset/dashboard/8/                   |
++-----+------------------------------+-----------+--------------------------------------------------------------------+
+
+
+Superset::Dashboard::Get.call(1) # same as Superset::Dashboard::Get.new(1).list
++----------------------------+
+|      World Banks Data      |
++----------------------------+
+| Charts                     |
++----------------------------+
+| % Rural                    |
+| Region Filter              |
+| Life Expectancy VS Rural % |
+| Box plot                   |
+| Most Populated Countries   |
+| Worlds Population          |
+| Worlds Pop Growth          |
+| Rural Breakdown            |
+| Treemap                    |
+| Growth Rate                |
++----------------------------+
+
+
+```
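The `response` / `result` / `list` / `call` pattern documented above is shared across most request classes. A minimal sketch of that interface, assuming API credentials are already configured per the gem's README:

```ruby
list = Superset::Dashboard::List.new(title_contains: 'Sales')

list.response # the full API response hash
list.result   # just the result attribute array
list.list     # formatted table printed to the console

# Get and List requests also expose the class-method shorthand:
Superset::Dashboard::List.call
```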
data/lib/superset/base_put_request.rb
ADDED
@@ -0,0 +1,30 @@
+module Superset
+  class BasePutRequest < Superset::Request
+    attr_reader :object_id, :params
+
+    def initialize(object_id: ,params: )
+      @object_id = object_id
+      @params = params
+    end
+
+    def perform
+      validate
+      response
+    end
+
+    def response
+      @response ||= client.put(route, params)
+    end
+
+    private
+
+    def validate
+      raise "Error: object_id integer is required" unless object_id.present? && object_id.is_a?(Integer)
+      raise "Error: params hash is required" unless params.present? && params.is_a?(Hash)
+    end
+
+    def route
+      raise "Error: route method is required"
+    end
+  end
+end
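`BasePutRequest` is a template: subclasses inherit the validation and the PUT call, and only need to supply `route`. This release adds two concrete subclasses, `Superset::Chart::Put` (shown below) and `Superset::Dataset::Put`. A sketch of the pattern; the `dataset/...` route shown here is an assumption modeled on the chart version:

```ruby
module Superset
  module Dataset
    class Put < Superset::BasePutRequest
      private

      # the only required override; object_id and params are
      # validated by BasePutRequest#perform
      def route
        "dataset/#{object_id}"
      end
    end
  end
end

# Superset::Dataset::Put.new(object_id: 101, params: { owners: [3] }).perform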
data/lib/superset/chart/create.rb
ADDED
@@ -0,0 +1,40 @@
+=begin
+Create a new chart from a set of params
+Suggestion is to base your params of an existing charts params and then modify them as needed
+So .. why not call the Superset::Chart::Duplicate class which then calls this Chart::Create class
+
+This class is a bit more generic and can be used to create a new chart from scratch (if your confident in the params)
+
+Usage:
+Superset::Chart::Create.new(params: new_chart_params).perform
+=end
+
+module Superset
+  module Chart
+    class Create < Superset::Request
+
+      attr_reader :params
+
+      def initialize(params: )
+        @params = params
+      end
+
+      def perform
+        raise "Error: params hash is required" unless params.present? && params.is_a?(Hash)
+
+        logger.info("Creating New Chart")
+        response['id']
+      end
+
+      def response
+        @response ||= client.post(route, params)
+      end
+
+      private
+
+      def route
+        "chart/"
+      end
+    end
+  end
+end
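As the class comment suggests, the practical route is to copy an existing chart's attributes and adjust them. A hedged sketch (the ids and name are placeholders; the attribute set mirrors `Chart::Duplicate#new_chart_params` below):

```ruby
source = Superset::Chart::Get.new(42).result[0]

new_chart_params = {
  "datasource_id"            => 101,     # target dataset the new chart should use
  "datasource_type"          => "table",
  "params"                   => source['params'],
  "query_context_generation" => true,
  "slice_name"               => "Copy of #{source['slice_name']}",
  "viz_type"                 => source['viz_type']
}

new_chart_id = Superset::Chart::Create.new(params: new_chart_params).perform
```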
data/lib/superset/chart/duplicate.rb
ADDED
@@ -0,0 +1,75 @@
+# There is no API endpoint to duplicate charts in Superset.
+# This class is a workaround.
+# Requires a source chart id, target dataset id
+
+module Superset
+  module Chart
+    class Duplicate < Superset::Request
+
+      attr_reader :source_chart_id, :target_dataset_id, :new_chart_name
+
+      def initialize(source_chart_id: , target_dataset_id: , new_chart_name: )
+        @source_chart_id = source_chart_id
+        @target_dataset_id = target_dataset_id
+        @new_chart_name = new_chart_name
+      end
+
+      def perform
+        raise "Error: source_chart_id integer is required" unless source_chart_id.present? && source_chart_id.is_a?(Integer)
+        raise "Error: target_dataset_id integer is required" unless target_dataset_id.present? && target_dataset_id.is_a?(Integer)
+        raise "Error: new_chart_name string is required" unless new_chart_name.present? && new_chart_name.is_a?(String)
+
+        logger.info("Duplicating Chart #{source_chart_id}:#{source_chart['slice_name']}. New chart dataset #{target_dataset_id} and new chart name #{new_chart_name}")
+        Superset::Chart::Create.new(params: new_chart_params).perform
+      end
+
+      private
+
+      def new_chart_params
+        # pulled list from Swagger GUI for chart POST request
+        # commented out params seem to be not required .. figured out by trial and error
+        {
+          #"cache_timeout": 0,
+          #"certification_details": "string",
+          #"certified_by": "string",
+          #"dashboards": [ 0 ],
+          "datasource_id": target_dataset_id,
+          # "datasource_name": new_chart_name,
+          "datasource_type": "table",
+          # "description": "",
+          # "external_url": "string",
+          # "is_managed_externally": true,
+          # "owners": [ 3 ], # TODO .. check if this is a Required attr, might need to get current API users id.
+          "params": new_chart_internal_params,
+          "query_context": new_chart_internal_query_context,
+          "query_context_generation": true,
+          "slice_name": new_chart_name,
+          "viz_type": source_chart['viz_type']
+        }
+      end
+
+      def new_chart_internal_params
+        new_params = JSON.parse(source_chart['params'])
+        new_params['datasource'] = new_params['datasource'].gsub(source_chart_dataset_id.to_s, target_dataset_id.to_s)
+        new_params.delete('slice_id') # refers to the source chart id .. a new id will be generated in the new chart
+        new_params.to_json
+      end
+
+      def new_chart_internal_query_context
+        new_query_context = JSON.parse(source_chart['query_context'])
+        new_query_context['datasource'] = new_query_context['datasource']['id'] = target_dataset_id
+        new_query_context['form_data']['datasource'] = new_query_context['form_data']['datasource'].gsub(source_chart_dataset_id.to_s, target_dataset_id.to_s)
+        new_query_context['form_data'].delete('slice_id')
+        new_query_context.to_json
+      end
+
+      def source_chart
+        @source_chart ||= Superset::Chart::Get.new(source_chart_id).result[0]
+      end
+
+      def source_chart_dataset_id
+        @source_chart_dataset_id ||= JSON.parse(source_chart[:query_context])['datasource']['id']
+      end
+    end
+  end
+end
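Usage is a one-liner; internally the class rewrites the source chart's embedded `params` and `query_context` JSON to point at the target dataset, then delegates to `Superset::Chart::Create`:

```ruby
new_chart_id = Superset::Chart::Duplicate.new(
  source_chart_id:   42,    # chart to copy
  target_dataset_id: 101,   # dataset the copy should point at
  new_chart_name:    'Sales by Region (staging)'
).perform
```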
data/lib/superset/chart/put.rb
ADDED
@@ -0,0 +1,18 @@
+# Updates a chart in Superset with the given params
+#
+# Usage:
+# params = { owners: [ 58, 3 ] }
+# Superset::Chart::Put.new(object_id: 202, params: params ).perform
+
+module Superset
+  module Chart
+    class Put < Superset::BasePutRequest
+
+      private
+
+      def route
+        "chart/#{object_id}"
+      end
+    end
+  end
+end
data/lib/superset/chart/update_dataset.rb
CHANGED
@@ -43,7 +43,7 @@ module Superset
           new_params.merge!("query_context": query_context.to_json) # update to point to the new query context
           new_params.merge!("query_context_generation": true) # new param set to true to regenerate the query context
         end
-
+
         new_params
       end
     end
data/lib/superset/client.rb
CHANGED
@@ -44,7 +44,13 @@ module Superset
       @connection ||= Faraday.new(superset_host) do |f|
         f.authorization :Bearer, access_token
         f.use FaradayMiddleware::ParseJson, content_type: 'application/json'
-
+
+        if self.config.use_json
+          f.request :json
+        else
+          f.request :multipart
+          f.request :url_encoded
+        end
 
         f.adapter :net_http
       end
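The connection now branches on a `use_json` config flag: JSON-encoded bodies for ordinary API calls, multipart/url-encoded bodies for file uploads (`Superset::Dashboard::Import` below calls `client(use_json: false)` to upload its zip). For illustration, the same toggle as a standalone Faraday sketch, not the gem's own code:

```ruby
require 'faraday'
require 'faraday_middleware'

# Mirrors the branch added in the diff above.
def build_connection(host, token, use_json: true)
  Faraday.new(host) do |f|
    f.authorization :Bearer, token
    f.use FaradayMiddleware::ParseJson, content_type: 'application/json'
    if use_json
      f.request :json        # JSON-encode request bodies
    else
      f.request :multipart   # e.g. zip upload for dashboard import
      f.request :url_encoded
    end
    f.adapter :net_http
  end
end
```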
data/lib/superset/dashboard/bulk_delete_cascade.rb
CHANGED
@@ -31,7 +31,7 @@ module Superset
     private
 
     def delete_datasets(dashboard_id)
-      datasets_to_delete = Superset::Dashboard::Datasets::List.new(dashboard_id).datasets_details.map{|d| d[:id] }
+      datasets_to_delete = Superset::Dashboard::Datasets::List.new(dashboard_id: dashboard_id).datasets_details.map{|d| d[:id] }
       Superset::Dataset::BulkDelete.new(dataset_ids: datasets_to_delete).perform if datasets_to_delete.any?
     end
 
data/lib/superset/dashboard/compare.rb
CHANGED
@@ -37,8 +37,8 @@ module Superset
 
     def list_datasets
       puts "\n ====== DASHBOARD DATASETS ====== "
-      Superset::Dashboard::Datasets::List.new(first_dashboard_id).list
-      Superset::Dashboard::Datasets::List.new(second_dashboard_id).list
+      Superset::Dashboard::Datasets::List.new(dashboard_id: first_dashboard_id).list
+      Superset::Dashboard::Datasets::List.new(dashboard_id: second_dashboard_id).list
     end
 
     def list_charts
data/lib/superset/dashboard/datasets/list.rb
CHANGED
@@ -7,14 +7,15 @@ module Superset
   module Dashboard
     module Datasets
       class List < Superset::Request
-        attr_reader :id
+        attr_reader :id, :include_filter_datasets # id - dashboard id
 
         def self.call(id)
           self.new(id).list
         end
 
-        def initialize(
-        @id =
+        def initialize(dashboard_id:, include_filter_datasets: false)
+          @id = dashboard_id
+          @include_filter_datasets = include_filter_datasets
         end
 
         def perform
@@ -24,7 +25,7 @@ module Superset
 
         def schemas
           @schemas ||= begin
-            all_dashboard_schemas =
+            all_dashboard_schemas = datasets_details.map {|d| d[:schema] }.uniq
 
             # For the current superset setup we will assume a dashboard datasets will point to EXACTLY one schema, their own.
            # if not .. we need to know about it. Potentially we could override this check if others do not consider it a problem.
@@ -36,37 +37,64 @@ module Superset
         end
 
         def datasets_details
-          result.map do |details|
+          chart_datasets = result.map do |details|
            details.slice('id', 'datasource_name', 'schema', 'sql').merge('database' => details['database'].slice('id', 'name', 'backend')).with_indifferent_access
           end
+          return chart_datasets unless include_filter_datasets
+          chart_dataset_ids = chart_datasets.map{|d| d['id'] }
+          filter_dataset_ids_not_used_in_charts = filter_dataset_ids - chart_dataset_ids
+          return chart_datasets if filter_dataset_ids_not_used_in_charts.empty?
+          # returning chart and filter datasets
+          chart_datasets + filter_datasets(filter_dataset_ids_not_used_in_charts)
         end
 
         private
 
+        def filter_dataset_ids
+          @filter_dataset_ids ||= dashboard.filter_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact.uniq
+        end
+
+        def filter_datasets(filter_dataset_ids_not_used_in_charts)
+          filter_dataset_ids_not_used_in_charts.map do |filter_dataset_id|
+            filter_dataset = Superset::Dataset::Get.new(filter_dataset_id).result
+            database_info = {
+              'id' => filter_dataset['database']['id'],
+              'name' => filter_dataset['database']['database_name'],
+              'backend' => filter_dataset['database']['backend']
+            }
+            filter_dataset.slice('id', 'datasource_name', 'schema', 'sql').merge('database' => database_info, 'filter_only': true).with_indifferent_access
+          end
+        end
+
         def route
           "dashboard/#{id}/datasets"
         end
 
         def list_attributes
-          ['id', 'datasource_name', 'database_id', 'database_name', 'database_backend', 'schema'].map(&:to_sym)
+          ['id', 'datasource_name', 'database_id', 'database_name', 'database_backend', 'schema', 'filter_only'].map(&:to_sym)
         end
 
         def rows
-
+          datasets_details.map do |d|
            [
              d[:id],
              d[:datasource_name],
              d[:database][:id],
              d[:database][:name],
              d[:database][:backend],
-             d[:schema]
+             d[:schema],
+             d[:filter_only]
            ]
          end
        end
 
        # when displaying a list of datasets, show dashboard title as well
        def title
-          @title ||= [id,
+          @title ||= [id, dashboard.title].join(' ')
+        end
+
+        def dashboard
+          @dashboard ||= Superset::Dashboard::Get.new(id)
        end
      end
    end
data/lib/superset/dashboard/export.rb
CHANGED
@@ -13,7 +13,7 @@ module Superset
   class Export < Request
     include FileUtilities
 
-    TMP_SUPERSET_DASHBOARD_PATH = '/tmp/
+    TMP_SUPERSET_DASHBOARD_PATH = '/tmp/superset_dashboard_exports'
 
     attr_reader :dashboard_id, :destination_path
 
@@ -23,10 +23,18 @@ module Superset
     end
 
     def perform
+      logger.info("Exporting dashboard: #{dashboard_id}")
       create_tmp_dir
       save_exported_zip_file
       unzip_files
+      clean_destination_directory
       copy_export_files_to_destination_path if destination_path
+
+      Dir.glob("#{destination_path_with_dash_id}/**/*").select { |f| File.file?(f) }
+    rescue StandardError => e
+      raise
+    ensure
+      cleanup_temp_dir
     end
 
     def response
@@ -37,6 +45,10 @@ module Superset
       )
     end
 
+    def zip_file_name
+      @zip_file_name ||= "#{tmp_uniq_dashboard_path}/dashboard_#{dashboard_id}_export_#{datestamp}.zip"
+    end
+
     private
 
     def params
@@ -44,10 +56,12 @@ module Superset
     end
 
     def save_exported_zip_file
+      logger.info("Saving zip file: #{zip_file_name}")
       File.open(zip_file_name, 'wb') { |fp| fp.write(response.body) }
     end
 
     def unzip_files
+      logger.info("Unzipping file: #{zip_file_name}")
       @extracted_files = unzip_file(zip_file_name, tmp_uniq_dashboard_path)
     end
 
@@ -55,25 +69,43 @@ module Superset
       File.dirname(extracted_files[0])
     end
 
+    def destination_path_with_dash_id
+      @destination_path_with_dash_id ||= File.join(destination_path, dashboard_id.to_s)
+    end
+
+    def clean_destination_directory
+      logger.info("Cleaning destination directory: #{destination_path_with_dash_id}")
+      if Dir.exist?(destination_path_with_dash_id)
+        FileUtils.rm_rf(Dir.glob("#{destination_path_with_dash_id}/*"))
+      else
+        FileUtils.mkdir_p(destination_path_with_dash_id)
+      end
+    end
+
     def copy_export_files_to_destination_path
+      logger.info("Copying files to destination: #{destination_path_with_dash_id}")
       path_with_dash_id = File.join(destination_path, dashboard_id.to_s)
       FileUtils.mkdir_p(path_with_dash_id) unless File.directory?(path_with_dash_id)
+      FileUtils.cp(zip_file_name, path_with_dash_id)
 
       Dir.glob("#{download_folder}/*").each do |item|
         FileUtils.cp_r(item, path_with_dash_id)
       end
     end
 
-    def
-
+    def cleanup_temp_dir
+      if Dir.exist?(tmp_uniq_dashboard_path)
+        FileUtils.rm_rf(tmp_uniq_dashboard_path)
+      end
     end
 
     def create_tmp_dir
+      logger.info("Creating tmp directory: #{tmp_uniq_dashboard_path}")
       FileUtils.mkdir_p(tmp_uniq_dashboard_path) unless File.directory?(tmp_uniq_dashboard_path)
     end
 
     # uniq random tmp folder name for each export
-    # this will allow us to do a wildcard glop on the folder to get the files
+    # this will allow us to do a wildcard glop on the folder to get the files
     def tmp_uniq_dashboard_path
       @tmp_uniq_dashboard_path ||= File.join(TMP_SUPERSET_DASHBOARD_PATH, uuid)
     end
@@ -93,6 +125,25 @@ module Superset
     def datestamp
       @datestamp ||= Time.now.strftime('%Y%m%d')
     end
+
+    def unzip_file(zip_path, destination)
+      extracted_files = []
+      Zip::File.open(zip_path) do |zip_file|
+        zip_file.each do |entry|
+          entry_path = File.join(destination, entry.name)
+          FileUtils.mkdir_p(File.dirname(entry_path))
+          zip_file.extract(entry, entry_path) unless File.exist?(entry_path)
+          extracted_files << entry_path
+        end
+      end
+      extracted_files
+    rescue => e
+      raise
+    end
+  end
+
+  def logger
+    @logger ||= Superset::Logger.new
   end
 end
-end
+end
data/lib/superset/dashboard/get.rb
CHANGED
@@ -4,6 +4,7 @@ module Superset
 
     attr_reader :id
 
+    # note .. this endpoint also accepts a dashboards uuid as the identifier
     def initialize(id)
       @id = id
     end
@@ -24,6 +25,10 @@ module Superset
     def json_metadata
       JSON.parse(result['json_metadata'])
     end
+
+    def filter_configuration
+      json_metadata['native_filter_configuration'] || []
+    end
 
     def positions
       JSON.parse(result['position_json'])
data/lib/superset/dashboard/import.rb
ADDED
@@ -0,0 +1,84 @@
+# Import the provided Dashboard zip file
+# In the context of this API import process, assumption is that the database.yaml file details will match
+# an existing database in the Target Superset Environment.
+
+# Scenario 1: Export from Env1 -- Import to Env1 into the SAME Environment
+# Will result in updating/over writing the dashboard with the contents of the zip file
+
+# Scenario 2: Export from Env1 -- Import to Env2 into a DIFFERENT Environment
+# Assumption is that the database.yaml will match a database configuration in the target env.
+# Initial import will result in creating a new dashboard with the contents of the zip file.
+# Subsequent imports will result in updating/over writing the previous imported dashboard with the contents of the zip file.
+
+# the overwrite flag will determine if the dashboard will be updated or created new
+# overwrite: false .. will result in an error if a dashboard with the same UUID already exists
+
+# Usage
+# Superset::Dashboard::Import.new(source_zip_file: '/tmp/dashboard.zip').perform
+#
+
+module Superset
+  module Dashboard
+    class Import < Request
+      attr_reader :source_zip_file, :overwrite
+
+      def initialize(source_zip_file: , overwrite: true)
+        @source_zip_file = source_zip_file
+        @overwrite = overwrite
+      end
+
+      def perform
+        validate_params
+        response
+      end
+
+      def response
+        @response ||= client(use_json: false).post(
+          route,
+          payload
+        )
+      end
+
+      private
+
+      def validate_params
+        raise ArgumentError, 'source_zip_file is required' if source_zip_file.nil?
+        raise ArgumentError, 'source_zip_file does not exist' unless File.exist?(source_zip_file)
+        raise ArgumentError, 'source_zip_file is not a zip file' unless File.extname(source_zip_file) == '.zip'
+        raise ArgumentError, 'overwrite must be a boolean' unless [true, false].include?(overwrite)
+        raise ArgumentError, "zip target database does not exist: #{zip_database_config_not_found_in_superset}" if zip_database_config_not_found_in_superset.present?
+      end
+
+      def payload
+        {
+          formData: Faraday::UploadIO.new(source_zip_file, 'application/zip'),
+          overwrite: overwrite.to_s
+        }
+      end
+
+      def route
+        "dashboard/import/"
+      end
+
+      def zip_database_config_not_found_in_superset
+        zip_databases_details.select {|s| !superset_database_uuids_found.include?(s[:uuid]) }
+      end
+
+      def superset_database_uuids_found
+        @superset_database_uuids_found ||= begin
+          zip_databases_details.map {|i| i[:uuid]}.map do |uuid|
+            uuid if Superset::Database::List.new(uuid_equals: uuid).result.present?
+          end.compact
+        end
+      end
+
+      def zip_databases_details
+        zip_dashboard_config[:databases].map{|d| {uuid: d[:content][:uuid], name: d[:content][:database_name]} }
+      end
+
+      def zip_dashboard_config
+        @zip_dashboard_config ||= Superset::Services::DashboardLoader.new(dashboard_export_zip: source_zip_file).perform
+      end
+    end
+  end
+end
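A sketch of the cross-environment round trip these classes enable, assuming your own code switches credentials between the two steps (see doc/migrating_dashboards_across_environments.md for the full workflow):

```ruby
# 1. Against the source environment:
files = Superset::Dashboard::Export.new(dashboard_id: 1, destination_path: '/tmp/exports').perform
zip   = files.find { |f| f.end_with?('.zip') }

# 2. Against the target environment. The database UUIDs inside the zip's
#    databases/*.yaml must match an existing database there, or
#    validate_params raises.
Superset::Dashboard::Import.new(source_zip_file: zip, overwrite: true).perform
```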