superset 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. checksums.yaml +7 -0
  2. data/.buildkite/pipeline.yml +16 -0
  3. data/.rspec +3 -0
  4. data/.rubocop.yml +13 -0
  5. data/CHANGELOG.md +48 -0
  6. data/Dockerfile +17 -0
  7. data/LICENSE +21 -0
  8. data/README.md +205 -0
  9. data/Rakefile +12 -0
  10. data/doc/duplicate_dashboards.md +214 -0
  11. data/doc/setting_up_personal_api_credentials.md +127 -0
  12. data/docker-compose.override.yml +10 -0
  13. data/docker-compose.yml +8 -0
  14. data/env.sample +9 -0
  15. data/lib/loggers/duplicate_dashboard_logger.rb +15 -0
  16. data/lib/superset/authenticator.rb +55 -0
  17. data/lib/superset/chart/bulk_delete.rb +40 -0
  18. data/lib/superset/chart/delete.rb +30 -0
  19. data/lib/superset/chart/get.rb +56 -0
  20. data/lib/superset/chart/list.rb +59 -0
  21. data/lib/superset/chart/update_dataset.rb +90 -0
  22. data/lib/superset/client.rb +53 -0
  23. data/lib/superset/credential/api_user.rb +25 -0
  24. data/lib/superset/credential/embedded_user.rb +25 -0
  25. data/lib/superset/dashboard/bulk_delete.rb +42 -0
  26. data/lib/superset/dashboard/bulk_delete_cascade.rb +52 -0
  27. data/lib/superset/dashboard/charts/list.rb +47 -0
  28. data/lib/superset/dashboard/compare.rb +94 -0
  29. data/lib/superset/dashboard/copy.rb +78 -0
  30. data/lib/superset/dashboard/datasets/list.rb +74 -0
  31. data/lib/superset/dashboard/delete.rb +42 -0
  32. data/lib/superset/dashboard/embedded/get.rb +56 -0
  33. data/lib/superset/dashboard/embedded/put.rb +35 -0
  34. data/lib/superset/dashboard/export.rb +98 -0
  35. data/lib/superset/dashboard/get.rb +51 -0
  36. data/lib/superset/dashboard/info.rb +17 -0
  37. data/lib/superset/dashboard/list.rb +99 -0
  38. data/lib/superset/dashboard/put.rb +37 -0
  39. data/lib/superset/dashboard/warm_up_cache.rb +42 -0
  40. data/lib/superset/database/get.rb +30 -0
  41. data/lib/superset/database/get_schemas.rb +25 -0
  42. data/lib/superset/database/list.rb +51 -0
  43. data/lib/superset/dataset/bulk_delete.rb +41 -0
  44. data/lib/superset/dataset/create.rb +62 -0
  45. data/lib/superset/dataset/delete.rb +30 -0
  46. data/lib/superset/dataset/duplicate.rb +62 -0
  47. data/lib/superset/dataset/get.rb +56 -0
  48. data/lib/superset/dataset/list.rb +41 -0
  49. data/lib/superset/dataset/update_query.rb +56 -0
  50. data/lib/superset/dataset/update_schema.rb +120 -0
  51. data/lib/superset/dataset/warm_up_cache.rb +41 -0
  52. data/lib/superset/display.rb +42 -0
  53. data/lib/superset/enumerations/object_type.rb +11 -0
  54. data/lib/superset/file_utilities.rb +19 -0
  55. data/lib/superset/guest_token.rb +69 -0
  56. data/lib/superset/logger.rb +20 -0
  57. data/lib/superset/request.rb +62 -0
  58. data/lib/superset/route_info.rb +34 -0
  59. data/lib/superset/security/permissions_resources/list.rb +22 -0
  60. data/lib/superset/security/role/create.rb +25 -0
  61. data/lib/superset/security/role/get.rb +32 -0
  62. data/lib/superset/security/role/list.rb +45 -0
  63. data/lib/superset/security/role/permission/create.rb +35 -0
  64. data/lib/superset/security/role/permission/get.rb +37 -0
  65. data/lib/superset/security/user/create.rb +49 -0
  66. data/lib/superset/security/user/get.rb +27 -0
  67. data/lib/superset/security/user/list.rb +42 -0
  68. data/lib/superset/services/duplicate_dashboard.rb +298 -0
  69. data/lib/superset/sqllab/execute.rb +52 -0
  70. data/lib/superset/tag/add_to_object.rb +46 -0
  71. data/lib/superset/tag/get.rb +30 -0
  72. data/lib/superset/tag/list.rb +37 -0
  73. data/lib/superset/version.rb +5 -0
  74. data/lib/superset.rb +17 -0
  75. data/log/README.md +4 -0
  76. data/superset.gemspec +55 -0
  77. metadata +300 -0
@@ -0,0 +1,99 @@
1
# Usage: Superset::Dashboard::List.new.list
# Usage: Superset::Dashboard::List.new(page_num: 1, title_contains: 'Test').list
# Usage: Superset::Dashboard::List.new(tags_equal: ['embedded', 'product:acme']).list

module Superset
  module Dashboard
    # Fetches a paginated list of dashboards, optionally filtered by a title
    # substring, a set of tags, or a list of ids to exclude.
    class List < Superset::Request
      attr_reader :title_contains, :tags_equal, :ids_not_in

      # @param page_num [Integer] zero-based results page
      # @param title_contains [String] substring match against dashboard_title
      # @param tags_equal [Array<String>] dashboards must carry all of these tags
      # @param ids_not_in [Array] dashboard ids to exclude from the results
      def initialize(page_num: 0, title_contains: '', tags_equal: [], ids_not_in: [])
        @title_contains = title_contains
        @tags_equal = tags_equal
        @ids_not_in = ids_not_in
        super(page_num: page_num)
      end

      def self.call
        self.new.list
      end

      def response
        validate_constructor_args
        super
      end

      # Returns one OpenStruct per dashboard, enriched with the dashboard's
      # dataset schemas and its embedded-dashboard details.
      def all
        result.map do |d|
          OpenStruct.new(
            list_attributes.map { |la| [la, d[la]] }.to_h.
              merge(retrieve_schemas(d[:id])).
              merge(retrieve_embedded_details(d[:id]))
          )
        end
      end

      def retrieve_schemas(id)
        { schemas: Datasets::List.new(id).schemas }
      rescue StandardError
        # within Superset, a bug exists around deleting dashboards failing and
        # then corrupting the datasets configs, so handle errored datasets gracefully
        # ref NEP-17532
        {}
      end

      def retrieve_embedded_details(id)
        embedded_dashboard = Dashboard::Embedded::Get.new(id)
        { allowed_embedded_domains: embedded_dashboard.allowed_domains,
          uuid: embedded_dashboard.uuid }
      end

      # Tabular rows for display; the url column is expanded to a full link.
      def rows
        result.map do |d|
          list_attributes.map do |la|
            la == :url ? "#{superset_host}#{d[la]}" : d[la]
          end
        end
      end

      def ids
        result.map { |d| d[:id] }
      end

      private

      def route
        "dashboard/?q=(#{query_params})"
      end

      def filters
        # TODO: filtering across all list classes can be refactored to support multiple options in a more flexible way
        filter_set = []
        filter_set << "(col:dashboard_title,opr:ct,value:'#{title_contains}')" if title_contains.present?
        filter_set << tag_filters if tags_equal.present?
        filter_set << ids_not_in_filters if ids_not_in.present?
        unless filter_set.empty?
          "filters:!(" + filter_set.join(',') + "),"
        end
      end

      def tag_filters
        tags_equal.map { |tag| "(col:tags,opr:dashboard_tags,value:'#{tag}')" }.join(',')
      end

      def ids_not_in_filters
        ids_not_in.map { |id| "(col:id,opr:neq,value:'#{id}')" }.join(',')
      end

      def list_attributes
        [:id, :dashboard_title, :status, :url]
      end

      def validate_constructor_args
        raise InvalidParameterError, "title_contains must be a String type" unless title_contains.is_a?(String)
        raise InvalidParameterError, "tags_equal must be an Array type" unless tags_equal.is_a?(Array)
        raise InvalidParameterError, "tags_equal array must contain string only values" unless tags_equal.all? { |item| item.is_a?(String) }
        raise InvalidParameterError, "ids_not_in must be an Array type" unless ids_not_in.is_a?(Array)
      end
    end
  end
end
@@ -0,0 +1,37 @@
1
+
2
+ # frozen_string_literal: true
3
+
4
+ module Superset
5
+ module Dashboard
6
+ class Put < Superset::Request
7
+
8
+ attr_reader :target_dashboard_id, :params
9
+
10
+ def initialize(target_dashboard_id:, params:)
11
+ @target_dashboard_id = target_dashboard_id
12
+ @params = params
13
+ end
14
+
15
+ def perform
16
+ raise "Error: target_dashboard_id integer is required" unless target_dashboard_id.present? && target_dashboard_id.is_a?(Integer)
17
+ raise "Error: params hash is required" unless params.present? && params.is_a?(Hash)
18
+
19
+ response
20
+ end
21
+
22
+ def response
23
+ @response ||= client.put(route, params)
24
+ end
25
+
26
+ def id
27
+ response["result"]["id"]
28
+ end
29
+
30
+ private
31
+
32
+ def route
33
+ "dashboard/#{target_dashboard_id}"
34
+ end
35
+ end
36
+ end
37
+ end
@@ -0,0 +1,42 @@
1
module Superset
  module Dashboard
    # Warms the cache for every dataset belonging to a dashboard.
    class WarmUpCache < Superset::Request

      attr_reader :dashboard_id

      def initialize(dashboard_id:)
        @dashboard_id = dashboard_id
      end

      def perform
        validate_dashboard_id
        response
      end

      # Iterates the dashboard's datasets and warms each one; a failure on one
      # dataset is reported to Rollbar and does not stop the remaining datasets.
      def response
        dataset_details = fetch_dataset_details(dashboard_id)
        dataset_details.each do |dataset|
          # block-level rescue: no explicit begin needed inside a do..end block
          warm_up_dataset(dataset["datasource_name"], dataset["name"])
        rescue => e
          Rollbar.error("Warm up cache failed for the dashboard #{dashboard_id} and for the dataset #{dataset["datasource_name"]} - #{e}")
        end
      end

      def warm_up_dataset(dataset_name, db_name)
        Superset::Dataset::WarmUpCache.new(dashboard_id: dashboard_id, table_name: dataset_name, db_name: db_name).perform
      end

      private

      def validate_dashboard_id
        raise InvalidParameterError, "dashboard_id must be present and must be an integer" unless dashboard_id.present? && dashboard_id.is_a?(Integer)
      end

      # Maps each dashboard dataset to a hash holding the database "name" and
      # the dataset "datasource_name".
      def fetch_dataset_details(dashboard_id)
        Superset::Dashboard::Datasets::List.new(dashboard_id).datasets_details.map { |dataset| dataset['database'].slice('name').merge(dataset.slice('datasource_name')) }
      end
    end
  end
end
@@ -0,0 +1,30 @@
1
module Superset
  module Database
    # Fetches a single database record by id.
    class Get < Superset::Request

      attr_reader :id

      def initialize(id)
        @id = id
      end

      def self.call(id)
        new(id).list
      end

      # Wrap the single record in an array so list/display helpers can iterate it.
      def result
        [super]
      end

      private

      def route
        "database/#{id}"
      end

      def list_attributes
        %w(id database_name backend driver expose_in_sqllab cache_timeout allow_file_upload)
      end
    end
  end
end
@@ -0,0 +1,25 @@
1
module Superset
  module Database
    # Lists the schemas available within a given database.
    class GetSchemas < Superset::Request
      alias schemas result
      alias list result

      attr_reader :id

      def initialize(id)
        @id = id
      end

      def self.call(id)
        new(id).schemas
      end

      private

      def route
        "database/#{id}/schemas/"
      end
    end
  end
end
@@ -0,0 +1,51 @@
1
# Usage: Superset::Database::List.call
# Usage: Superset::Database::List.new(title_contains: 'test').list

module Superset
  module Database
    # Fetches a paginated list of databases, optionally filtered by a
    # substring of the database name.
    class List < Superset::Request
      attr_reader :title_contains

      def initialize(page_num: 0, title_contains: '')
        @title_contains = title_contains
        super(page_num: page_num)
      end

      def self.call
        self.new.list
      end

      def response
        validate_constructor_args
        super
      end

      def ids
        result.map { |d| d[:id] }
      end

      private

      def route
        "database/?q=(#{query_params})"
      end

      def filters
        # TODO: filtering across all list classes can be refactored to support multiple options in a more flexible way
        filter_set = []
        filter_set << "(col:database_name,opr:ct,value:'#{title_contains}')" if title_contains.present?
        unless filter_set.empty?
          "filters:!(" + filter_set.join(',') + "),"
        end
      end

      def list_attributes
        [:id, :database_name, :backend, :expose_in_sqllab]
      end

      def validate_constructor_args
        raise InvalidParameterError, "title_contains must be a String type" unless title_contains.is_a?(String)
      end
    end
  end
end
@@ -0,0 +1,41 @@
1
# frozen_string_literal: true

# TODO: the gui delete has a confirmation step, this API call does not.
# Potentially we could add a confirm_delete parameter to the constructor that would ensure that all datasets either
# 1 belong to only an expected charts or filters before deleting
# 2 or do not belong to any charts or filters
# ( not sure if this needed at this point )

module Superset
  module Dataset
    # Deletes multiple datasets in a single API call.
    class BulkDelete < Superset::Request
      attr_reader :dataset_ids

      def initialize(dataset_ids: [])
        @dataset_ids = dataset_ids
      end

      # Validates the ids, logs the attempt, then issues the bulk delete.
      def perform
        raise InvalidParameterError, "dataset_ids array of integers expected" unless dataset_ids.is_a?(Array)
        raise InvalidParameterError, "dataset_ids array must contain Integer only values" unless dataset_ids.all? { |item| item.is_a?(Integer) }
        # guard against an accidental call with the default empty array, which
        # would otherwise send a delete request with an empty id list
        raise InvalidParameterError, "dataset_ids array must not be empty" if dataset_ids.empty?

        logger.info("Attempting to delete datasets with id: #{dataset_ids.join(', ')}")
        response
      end

      def response
        @response ||= client.delete(route, params)
      end

      private

      def params
        { q: "!(#{dataset_ids.join(',')})" }
      end

      def route
        "dataset/"
      end
    end
  end
end
@@ -0,0 +1,62 @@
1
# The API demands that the new_dataset_name be uniq within the schema it points to.
# same behaviour as in the GUI

module Superset
  module Dataset
    # Creates a new sql-backed dataset in the target database/schema.
    class Create < Superset::Request

      attr_reader :target_database_id, :new_dataset_name, :new_dataset_schema, :sql

      def initialize(target_database_id: , new_dataset_name: , new_dataset_schema: 'public', sql: )
        @target_database_id = target_database_id
        @new_dataset_name = new_dataset_name
        @new_dataset_schema = new_dataset_schema
        @sql = sql
      end

      # Validates arguments and creates the dataset.
      # @return [Hash] { id: <new dataset id>, dataset_name: <new dataset name> }
      def perform
        raise "Error: target_database_id integer is required" unless target_database_id.present? && target_database_id.is_a?(Integer)
        raise "Error: new_dataset_name string is required" unless new_dataset_name.present? && new_dataset_name.is_a?(String)
        raise "Error: Dataset Name #{new_dataset_name} is already in use in the schema: #{new_dataset_schema}. Suggest you add (COPY) as a suffix to the name" if new_dataset_name_already_in_use?
        raise "Error: sql string is required" unless sql.present? && sql.is_a?(String)

        logger.info("Creating New Dataset #{new_dataset_name} in DB #{target_database_id} Schema #{new_dataset_schema}")

        # response is memoized, so referencing it here performs the POST
        { id: response['id'], dataset_name: response['data']['datasource_name'] }
      end

      def response
        @response ||= client.post(route, params)
      end

      def params
        {
          "schema": new_dataset_schema,
          "sql": sql,
          "table_name": new_dataset_name,
          "database": target_database_id

          # Optional Params .. pulled straight from the GUI swagger example

          #"always_filter_main_dttm": false,
          #"external_url": "string",
          #"is_managed_externally": false,
          #"normalize_columns": false,
          # "owners": [ 0 ],
        }
      end

      private

      # The API demands that the new_dataset_name be uniq within the schema it points to.
      def new_dataset_name_already_in_use?
        Dataset::List.new(title_equals: new_dataset_name, schema_equals: new_dataset_schema, database_id_eq: target_database_id).result.any?
      end

      def route
        "dataset/"
      end
    end
  end
end
@@ -0,0 +1,30 @@
1
# frozen_string_literal: true

module Superset
  module Dataset
    # Deletes a single dataset by id.
    class Delete < Superset::Request
      attr_reader :dataset_id

      def initialize(dataset_id: )
        @dataset_id = dataset_id
      end

      # Validates the id, logs the attempt, then issues the delete request.
      def perform
        unless dataset_id.present? && dataset_id.is_a?(Integer)
          raise InvalidParameterError, "dataset_id integer is required"
        end

        logger.info("Attempting to delete dataset with id: #{dataset_id}")
        response
      end

      # Memoized API response.
      def response
        @response ||= client.delete(route)
      end

      private

      def route
        "dataset/#{dataset_id}"
      end
    end
  end
end
@@ -0,0 +1,62 @@
1
# The API demands that the new_dataset_name be uniq within the schema it points to.
# same behaviour as in the GUI

module Superset
  module Dataset
    # Duplicates an existing dataset under a new name within the same schema.
    class Duplicate < Superset::Request

      attr_reader :source_dataset_id, :new_dataset_name

      def initialize(source_dataset_id: , new_dataset_name: )
        @source_dataset_id = source_dataset_id
        @new_dataset_name = new_dataset_name
      end

      # Validates arguments, performs the duplication, and returns the id of
      # the newly created dataset.
      def perform
        raise "Error: source_dataset_id integer is required" unless source_dataset_id.present? && source_dataset_id.is_a?(Integer)
        raise "Error: new_dataset_name string is required" unless new_dataset_name.present? && new_dataset_name.is_a?(String)
        raise "Error: new_dataset_name already in use in this schema: #{new_dataset_name}. Suggest you add (COPY) as a suffix to the name" if new_dataset_name_already_in_use?

        logger.info("Duplicating Source Dataset #{source_dataset.title} with id #{source_dataset_id}")

        new_dataset_id
      end

      # Memoized API response.
      def response
        @response ||= client.post(route, params)
      end

      def params
        {
          "base_model_id" => source_dataset_id,
          "table_name" => new_dataset_name
        }
      end

      private

      def source_dataset
        @source_dataset ||= Dataset::Get.new(source_dataset_id).perform
      end

      # The API demands that the new_dataset_name be uniq within the schema it points to.
      def new_dataset_name_already_in_use?
        Dataset::List.new(title_equals: new_dataset_name, schema_equals: source_dataset.schema).result.any?
      end

      # Pulls the new dataset id out of the API response; raises when the
      # response carries no id.
      def new_dataset_id
        unless response["id"].present?
          logger.error("Error: Unable to duplicate dataset: #{response}")
          raise "Error: Unable to duplicate dataset: #{response}"
        end

        logger.info(" Finished. Duplicate Dataset Name #{new_dataset_name} with id #{response['id']}")
        response["id"]
      end

      def route
        "dataset/duplicate"
      end
    end
  end
end
@@ -0,0 +1,56 @@
1
module Superset
  module Dataset
    # Fetches a single dataset by id and exposes its key attributes.
    class Get < Superset::Request

      attr_reader :id

      def initialize(id)
        @id = id
      end

      def self.call(id)
        self.new(id).list
      end

      # Performs the request and returns self so callers can chain accessors.
      def perform
        response
        self
      end

      def rows
        [ [title, schema, database_name, database_id] ]
      end

      def schema
        result['schema']
      end

      def title
        result['name']
      end

      private

      def route
        "dataset/#{id}"
      end

      def display_headers
        # fixed: a stray comma inside %w produced a literal "database_name," header
        %w[title schema database_name database_id]
      end

      def database_name
        result['database']['database_name']
      end

      def database_id
        result['database']['id']
      end

      def sql
        # fixed: previously returned the literal array ['sql'] instead of the dataset's sql
        result['sql']
      end
    end
  end
end
@@ -0,0 +1,41 @@
1
module Superset
  module Dataset
    # Fetches a paginated list of datasets with optional name, schema and
    # database filters.
    class List < Superset::Request
      attr_reader :title_contains, :title_equals, :schema_equals, :database_id_eq

      def initialize(page_num: 0, title_contains: '', title_equals: '', schema_equals: '', database_id_eq: '')
        @title_contains = title_contains
        @title_equals = title_equals
        @schema_equals = schema_equals
        @database_id_eq = database_id_eq
        super(page_num: page_num)
      end

      def self.call
        new.list
      end

      private

      def route
        "dataset/?q=(#{query_params})"
      end

      # Builds the rison filter expression from whichever options were supplied.
      def filters
        # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
        conditions = []
        conditions << "(col:table_name,opr:ct,value:'#{title_contains}')" if title_contains.present?
        conditions << "(col:table_name,opr:eq,value:'#{title_equals}')" if title_equals.present?
        conditions << "(col:schema,opr:eq,value:'#{schema_equals}')" if schema_equals.present?
        conditions << "(col:database,opr:rel_o_m,value:#{database_id_eq})" if database_id_eq.present? # rel one to many
        return if conditions.empty?

        "filters:!(" + conditions.join(',') + "),"
      end

      def list_attributes
        ['id', 'table_name', 'schema', 'changed_by_name']
      end
    end
  end
end
@@ -0,0 +1,56 @@
1
module Superset
  module Dataset
    # Replaces the SQL query of an existing dataset via a PUT request.
    class UpdateQuery < Superset::Request

      attr_reader :new_query, :dataset_id

      def initialize(dataset_id: ,new_query: )
        @new_query = new_query
        @dataset_id = dataset_id
      end

      # Validates the proposed change, then issues the update.
      def perform
        validate_proposed_changes

        response
      end

      # Memoized API response.
      def response
        @response ||= client.put(route, params)
      end

      def params
        { "sql": new_query }
      end

      # check if the sql query embedds the schema name, if so it can not be duplicated cleanly
      def sql_query_includes_hard_coded_schema?
        new_query.include?("#{source_dataset['schema']}.")
      end

      # will raise an error if the dataset does not exist
      def source_dataset
        @source_dataset ||= Get.new(dataset_id).result
      end

      private

      def validate_proposed_changes
        logger.info " Validating Dataset ID: #{dataset_id} query update to '#{new_query}'"
        raise "Error: dataset_id integer is required" unless dataset_id.present? && dataset_id.is_a?(Integer)
        raise "Error: new_query string is required" unless new_query.present? && new_query.is_a?(String)

        # does the sql query hard code the current schema name?
        if sql_query_includes_hard_coded_schema?
          raise "Error: >>WARNING<< The Dataset ID #{dataset_id} SQL query is hard coded with the schema value and can not be duplicated cleanly. " +
                "Remove all direct embedded schema calls from the Dataset SQL query before continuing."
        end
      end

      def route
        "dataset/#{dataset_id}"
      end
    end
  end
end