iij-dag-client 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. checksums.yaml +7 -0
  2. data/.gitignore +18 -0
  3. data/Gemfile +13 -0
  4. data/LICENSE.txt +174 -0
  5. data/Rakefile +43 -0
  6. data/config/settings.yml +11 -0
  7. data/iij-dag-client.gemspec +31 -0
  8. data/lib/dag.rb +33 -0
  9. data/lib/dag/client.rb +36 -0
  10. data/lib/dag/client/api.rb +295 -0
  11. data/lib/dag/client/api/cluster.rb +111 -0
  12. data/lib/dag/client/api/database.rb +58 -0
  13. data/lib/dag/client/api/job.rb +116 -0
  14. data/lib/dag/client/api/list_params.rb +36 -0
  15. data/lib/dag/client/api/rest_parameter.rb +149 -0
  16. data/lib/dag/client/api/storage.rb +354 -0
  17. data/lib/dag/client/api/storage_result.rb +52 -0
  18. data/lib/dag/client/api/table.rb +131 -0
  19. data/lib/dag/client/cluster.rb +26 -0
  20. data/lib/dag/client/cluster_validation.rb +59 -0
  21. data/lib/dag/client/database.rb +79 -0
  22. data/lib/dag/client/exception.rb +43 -0
  23. data/lib/dag/client/job.rb +56 -0
  24. data/lib/dag/client/job_validation.rb +22 -0
  25. data/lib/dag/client/model.rb +9 -0
  26. data/lib/dag/client/model/bucket.rb +20 -0
  27. data/lib/dag/client/model/bucket_collection.rb +34 -0
  28. data/lib/dag/client/model/cluster.rb +100 -0
  29. data/lib/dag/client/model/cluster_collection.rb +76 -0
  30. data/lib/dag/client/model/database.rb +34 -0
  31. data/lib/dag/client/model/database_collection.rb +51 -0
  32. data/lib/dag/client/model/job.rb +125 -0
  33. data/lib/dag/client/model/job_collection.rb +114 -0
  34. data/lib/dag/client/model/object.rb +56 -0
  35. data/lib/dag/client/model/object_collection.rb +64 -0
  36. data/lib/dag/client/model/table.rb +55 -0
  37. data/lib/dag/client/model/table_collection.rb +60 -0
  38. data/lib/dag/client/storage.rb +41 -0
  39. data/lib/dag/client/table.rb +16 -0
  40. data/lib/dag/client/version.rb +5 -0
  41. data/lib/dag/settings.rb +9 -0
  42. metadata +210 -0
@@ -0,0 +1,26 @@
1
module Dag
  class Client
    # Client-side cluster operations: selecting a cluster by name and
    # retrieving cluster objects through the API.
    module Cluster
      include Dag::Client::ClusterValidation

      # Name of the cluster selected via #open (nil until opened).
      attr_reader :cluster_name

      # Selects the cluster to operate on; returns self for chaining.
      def open(cluster_name)
        @cluster_name = cluster_name
        self
      end

      # Enumerable collection of every cluster visible to the API.
      def clusters
        Dag::ClusterCollection.new(@api)
      end

      # Returns a Dag::Cluster model for the currently opened cluster.
      # Raises Dag::Client::ClusterNotOpen when #open has not been called.
      def cluster
        raise Dag::Client::ClusterNotOpen.new("cluster not opened") if @cluster_name.blank?

        cluster_status # populates @cluster_info as a side effect
        Dag::Cluster.new(@api, @cluster_info.merge("name" => @cluster_name))
      end
    end
  end
end
@@ -0,0 +1,59 @@
1
module Dag
  class Client
    # Shared helpers for querying and validating the status of the
    # currently opened cluster.
    module ClusterValidation
      # Returns the cluster status string, caching the API lookup.
      # Returns nil when no cluster name has been set.
      def cluster_status
        return @cluster_status if @cluster_status
        return @cluster_info['status'] if @cluster_info
        return unless @cluster_name

        @cluster_info = @api.cluster_info(@cluster_name)
        @cluster_status = @cluster_info['status']
        @cluster_status
      end

      # True when the cluster is in a state general operations accept.
      def valid_cluster_status?
        %w[init reserved stopped restarting norm failed ptfailed].include?(cluster_status)
      end

      # True when +status+ (defaults to the current cluster status) is any
      # state the cluster-info listing may report.
      def valid_cluster_info_list_status?(status = nil)
        status = cluster_status if status.nil?
        %w[init reserved stopped starting restarting norm failed ptfailed error].include?(status)
      end

      # True when the cluster is running normally.
      def cluster_norm?
        cluster_status == 'norm'
      end

      # True when the cluster is running, possibly with partial failures.
      def cluster_norm_or_ptfailed?
        %w[norm ptfailed].include?(cluster_status)
      end

      # True when the cluster is in a state from which it may be restarted.
      def cluster_restart_status?
        %w[norm failed ptfailed].include?(cluster_status)
      end

      # Raises unless a cluster is opened and in a valid status.
      def validate_cluster
        raise Dag::Client::ClusterNotOpen.new("cluster not opened") if @cluster_name.blank?
        raise Dag::Client::StatusInvalid.new("cluster is not valid status: #{cluster_status}") unless valid_cluster_status?
      end

      # Keys accepted by cluster `where` filters.
      VALID_WHERE_KEYS = [
        :status,
        :type,
        :cluster_name
      ]

      # Raises Dag::Client::ParameterInvalid when +params+ contains a key
      # outside VALID_WHERE_KEYS.
      def validate_cluster_param_keys(params)
        params.keys.each do |key|
          next if VALID_WHERE_KEYS.include?(key.to_sym)

          raise Dag::Client::ParameterInvalid.new("Invalid where condition: #{key}")
        end
      end
    end
  end
end
@@ -0,0 +1,79 @@
1
module Dag
  class Client
    # Database and table operations on the currently opened cluster.
    module Database
      include Dag::Client::ClusterValidation

      # Enumerable collection of databases on the opened cluster.
      def databases
        validate_cluster

        Dag::DatabaseCollection.new(@api, @cluster_name, cluster_status: @cluster_status)
      end

      # Creates a database and returns its model.
      def create_database(db_name)
        validate_cluster

        @api.create_database(@cluster_name, db_name)
        Dag::Database.new(@api, @cluster_name, db_name)
      end

      # Looks up a database by name.
      # Raises ParameterInvalid when +db_name+ is blank and
      # DatabaseNotFound when no such database exists.
      def database(db_name)
        validate_cluster

        raise Dag::Client::ParameterInvalid.new("db_name is blank") if db_name.blank?

        found = databases.detect { |d| d.db_name == db_name }
        raise Dag::Client::DatabaseNotFound.new('Database not found') unless found

        found
      end

      # Looks up a table by database and table name.
      # Raises ParameterInvalid on blank arguments and TableNotFound when
      # the API returns no information for the table.
      def table(db_name, tbl_name)
        validate_cluster

        raise Dag::Client::ParameterInvalid.new("db_name is blank") if db_name.blank?
        raise Dag::Client::ParameterInvalid.new("tbl_name is blank") if tbl_name.blank?

        table_info = @api.table(@cluster_name, db_name, tbl_name)
        raise Dag::Client::TableNotFound.new('Table not found') unless table_info

        Dag::Table.new(@api, @cluster_name, db_name, params: table_info)
      end

      # Creates a table and returns its model.
      #
      # == parameters ==
      # * <tt>tbl_name</tt> - table name
      # * <tt>format</tt> - 'csv' or 'tsv' or 'json' or 'json_agent'
      # * <tt>schema</tt> - schema
      # * <tt>comment</tt> - comment
      def create_table(db_name, tbl_name, format: nil, schema: nil, comment: nil)
        validate_cluster

        params = {
          table: tbl_name,
          schema: schema,
          create_api: true
        }
        params[:format] = format if format
        params[:comment] = comment if comment

        @api.create_table(@cluster_name, db_name, params: params)
        table_info = @api.table(@cluster_name, db_name, tbl_name)
        Dag::Table.new(@api, @cluster_name, db_name, params: table_info)
      end

      # Submits a split-table job and returns the resulting Dag::Job.
      # Requires the cluster to be in 'norm' status.
      def split_table(db_name, tbl_name, params)
        unless cluster_norm?
          raise Dag::Client::StatusInvalid.new("cluster status is invalid: #{cluster_status}")
        end

        split_info = @api.split_table(@cluster_name, db_name, tbl_name, params)
        query_info = @api.query_info(split_info['queryId'])
        Dag::Job.new(@api, query_info)
      end
    end
  end
end
@@ -0,0 +1,43 @@
1
module Dag
  class Client
    # Raised when the DAG REST API reports a failure; carries the raw
    # error details returned by the service.
    class APIFailure < StandardError
      attr_accessor :api_code, :api_message, :api_status,
                    :api_request_id, :api_resource
    end

    # -- API --

    # Raised when a user-supplied parameter is invalid.
    class ParameterInvalid < StandardError
    end

    # Raised when an API option is invalid.
    class APIOptionInvalid < StandardError
    end

    # Raised when an operation is attempted in an invalid status.
    class StatusInvalid < StandardError
    end

    # -- Job --

    # Raised when a job has an unexpected type.
    class JobTypeInvalid < StandardError
    end

    # -- Cluster --

    # Raised when an operation requires a cluster to be opened first.
    class ClusterNotOpen < StandardError
    end

    # Raised when the cluster was rebooted.
    class ClusterRebooted < StandardError
    end

    # -- Database --

    # Raised when a database lookup finds nothing.
    class DatabaseNotFound < StandardError
    end

    # -- Table --

    # Raised when creating a table that already exists.
    class TableAlreadyExists < StandardError
    end

    # Raised when a table lookup finds nothing.
    class TableNotFound < StandardError
    end
  end
end
@@ -0,0 +1,56 @@
1
+ require 'active_support/core_ext/integer/time'
2
+
3
module Dag
  class Client
    # Query-job operations: listing, inspecting, cancelling and running jobs.
    module Job
      # Enumerable collection of all jobs.
      def jobs
        Dag::JobCollection.new(@api)
      end

      # Fetches a single job by id and wraps it in a model.
      def job(job_id)
        Dag::Job.new(@api, @api.query_info(job_id))
      end

      # Returns the log text for a job ('' when no log is available).
      def job_log(job_id)
        job(job_id).validate_log_condition
        log_info = @api.query_log(job_id)
        log_info ? log_info['log'] : ''
      end

      # Cancels a job after validating that it may be cancelled.
      def job_cancel(job_id)
        job(job_id).validate_cancel_condition
        @api.query_cancel(job_id)
      end

      # Submits a query against the opened cluster and returns the job.
      #
      # == parameters ==
      # * <tt>query</tt> - query
      # * <tt>output_format</tt> - 'csv' or 'tsv'
      # * <tt>output_resource_path</tt> - "${bucketname}/${output_object}"
      # * <tt>label</tt> - label
      def query(query: '', output_format: 'csv', output_resource_path: '', label: '')
        validate_cluster

        select_info = @api.query(query: query,
                                 output_format: output_format,
                                 output_resource_path: output_resource_path,
                                 cluster_name: @cluster_name,
                                 label: label)
        job(select_info['queryId'])
      end

      # Pre-signed download URLs for a job's output objects.
      def job_download_urls(job_id)
        job(job_id).download_urls
      end
    end
  end
end
@@ -0,0 +1,22 @@
1
module Dag
  class Client
    # Validation helpers for job `where` filter parameters.
    module JobValidation
      # Keys accepted by job `where` filters.
      VALID_WHERE_KEYS = [
        :status,
        :type,
        :cluster_name,
        :label,
        :cluster_rebooted
      ]

      # Raises Dag::Client::ParameterInvalid when +params+ contains a key
      # outside VALID_WHERE_KEYS.
      def validate_job_param_keys(params)
        params.keys.each do |key|
          next if VALID_WHERE_KEYS.include?(key.to_sym)

          raise Dag::Client::ParameterInvalid.new("Invalid where condition: #{key}")
        end
      end
    end
  end
end
@@ -0,0 +1,9 @@
1
module Dag
  # Base class for API-backed models; stores the shared low-level
  # REST client used by every subclass.
  class Model
    # The low-level API client.
    attr_reader :api

    def initialize(api)
      @api = api
    end
  end
end
@@ -0,0 +1,20 @@
1
module Dag
  # A single storage bucket.
  class Bucket < Model
    # The bucket name.
    attr_reader :name

    def initialize(api, bucket_name)
      super(api)
      @name = bucket_name
    end

    # Deletes this bucket via the API.
    def delete
      @api.delete_bucket(@name)
    end

    # Objects contained in this bucket.
    # NOTE(review): the +prefix+ argument is accepted but never forwarded
    # to ObjectCollection — confirm whether prefix filtering was intended.
    def objects(prefix: nil)
      Dag::ObjectCollection.new(@api, @name)
    end
  end
end
@@ -0,0 +1,34 @@
1
module Dag
  # Enumerable collection of storage buckets.
  class BucketCollection < Model
    include Enumerable

    # Creates a bucket and returns its model.
    def create(bucket_name)
      @api.create_bucket(bucket_name)
      bucket_named(bucket_name)
    end

    # @example
    #
    #   bucket = client.buckets[:mybucket],
    #   bucket = client.buckets['mybucket'],
    #
    # @param [String] bucket_name
    # @return [Bucket]
    def [](bucket_name)
      bucket_named(bucket_name)
    end

    # Yields a Bucket for every bucket name returned by the API.
    def each
      @api.buckets.buckets.each do |raw_name|
        yield bucket_named(raw_name)
      end
    end

    private

    # Wraps a raw bucket name in a Bucket model.
    def bucket_named(bucket)
      Dag::Bucket.new(@api, bucket.to_s)
    end
  end
end
@@ -0,0 +1,100 @@
1
+ require 'active_support/core_ext/string/inflections'
2
+ require 'ostruct'
3
+
4
module Dag
  # Model of a single DAG cluster.
  class Cluster < Model
    include Dag::Client::ClusterValidation

    def initialize(api, cluster)
      super(api)

      @name = cluster['name']
      @status = cluster['status']
      @cluster_status = @status # seed the ClusterValidation cache
      @type = cluster['type']
      @instances = cluster['instances']
      @debug = cluster['debug']
    end

    attr_reader :name, :status, :type, :debug

    # restart cluster
    #
    # ==== Parameters
    # * <tt>:force</tt> - restart forcely. false by default
    # * <tt>:type</tt> - cluster type 'DAG5-Hive-Hadoop'
    def restart(force: false, type: nil)
      unless cluster_restart_status?
        raise Dag::Client::StatusInvalid.new("cluster status is invalid: #{cluster_status}")
      end

      params = {
        force: force,
        type: type || @type, # keep the current type unless overridden
        debug: false
      }
      @api.cluster_restart(@name, params)
    end

    # Exports the cluster log; +compress+ defaults to false.
    def export_log(params = {})
      unless cluster_norm_or_ptfailed?
        raise Dag::Client::StatusInvalid.new("cluster status is invalid: #{cluster_status}")
      end

      @api.cluster_export_log(@name, { compress: false }.merge(params))
    end

    # return instance infomation
    #
    # == instance detail
    # * <tt>grade</tt>
    # * <tt>quantity</tt>
    def instances
      @instances.map { |attrs| OpenStruct.new(attrs).freeze }
    end

    # return statistics information
    #
    # == instance detail ==
    # * <tt>instances</tt>
    #   * <tt>instance_id</tt>
    #   * <tt>grade</tt>
    #   * <tt>disk</tt>
    #     * <tt>dfs_used</tt>
    #     * <tt>non_dfs_used</tt>
    #     * <tt>capacity</tt>
    # * <tt>disk</tt>
    #   * <tt>capacity</tt>
    #   * <tt>used</tt>
    def statistics
      statistics_info = if valid_cluster_status? && cluster_norm_or_ptfailed?
        @api.statistics(@name)
      end

      # Keep only the documented top-level keys when anything came back.
      if statistics_info.present?
        statistics_info = {
          "instances" => statistics_info['instances'],
          "disk" => statistics_info['disk']
        }
      end

      return nil unless statistics_info
      key_to_underscore(statistics_info)
    end

    private

    # Recursively converts camelCase keys to snake_case and wraps the
    # result in a frozen OpenStruct.
    def key_to_underscore(hash)
      converted = hash.each_with_object({}) do |(key, value), acc|
        value = value.map { |item| key_to_underscore(item) } if value.instance_of?(Array)
        value = key_to_underscore(value) if value.instance_of?(Hash)
        acc[key.underscore] = value
      end
      OpenStruct.new(converted).freeze
    end
  end
end