td-client 0.8.67 → 0.8.68

@@ -0,0 +1,62 @@
+ class TreasureData::API
+ module AccessControl
+
+   ####
+   ## Access Control API
+   ##
+
+   def grant_access_control(subject, action, scope, grant_option)
+     params = {'subject'=>subject, 'action'=>action, 'scope'=>scope, 'grant_option'=>grant_option.to_s}
+     code, body, res = post("/v3/acl/grant", params)
+     if code != "200"
+       raise_error("Granting access control failed", res)
+     end
+     return true
+   end
+
+   def revoke_access_control(subject, action, scope)
+     params = {'subject'=>subject, 'action'=>action, 'scope'=>scope}
+     code, body, res = post("/v3/acl/revoke", params)
+     if code != "200"
+       raise_error("Revoking access control failed", res)
+     end
+     return true
+   end
+
+   # [true, [{subject:String,action:String,scope:String}]]
+   def test_access_control(user, action, scope)
+     params = {'user'=>user, 'action'=>action, 'scope'=>scope}
+     code, body, res = get("/v3/acl/test", params)
+     if code != "200"
+       raise_error("Testing access control failed", res)
+     end
+     js = checked_json(body, %w[permission access_controls])
+     perm = js["permission"]
+     acl = js["access_controls"].map {|roleinfo|
+       subject = roleinfo['subject']
+       action = roleinfo['action']
+       scope = roleinfo['scope']
+       [subject, action, scope]
+     }
+     return perm, acl
+   end
+
+   # [{subject:String,action:String,scope:String}]
+   def list_access_controls
+     code, body, res = get("/v3/acl/list")
+     if code != "200"
+       raise_error("Listing access control failed", res)
+     end
+     js = checked_json(body, %w[access_controls])
+     acl = js["access_controls"].map {|roleinfo|
+       subject = roleinfo['subject']
+       action = roleinfo['action']
+       scope = roleinfo['scope']
+       grant_option = roleinfo['grant_option']
+       [subject, action, scope, grant_option]
+     }
+     return acl
+   end
+
+ end
+ end
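
A minimal usage sketch of the new AccessControl calls, assuming the gem is loaded with require 'td-client' and that TreasureData::API.new(apikey) is still the low-level entry point (both are assumptions, not part of this diff); the subject/action/scope values are illustrative only.

    require 'td-client'                              # assumed require path
    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    # grant_option is stringified by the client, so a plain boolean works here
    api.grant_access_control('user:alice', 'full_access', 'db.*', true)

    perm, acl = api.test_access_control('alice', 'full_access', 'db.sales')
    acl.each {|subject, action, scope| puts "#{subject} #{action} #{scope}" }

    api.list_access_controls.each {|subject, action, scope, grant_option|
      p [subject, action, scope, grant_option]
    }
    api.revoke_access_control('user:alice', 'full_access', 'db.*')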
@@ -0,0 +1,41 @@
+ class TreasureData::API
+ module Account
+
+   ####
+   ## Account API
+   ##
+
+   def show_account
+     code, body, res = get("/v3/account/show")
+     if code != "200"
+       raise_error("Show account failed", res)
+     end
+     js = checked_json(body, %w[account])
+     a = js["account"]
+     account_id = a['id'].to_i
+     plan = a['plan'].to_i
+     storage_size = a['storage_size'].to_i
+     guaranteed_cores = a['guaranteed_cores'].to_i
+     maximum_cores = a['maximum_cores'].to_i
+     created_at = a['created_at']
+     return [account_id, plan, storage_size, guaranteed_cores, maximum_cores, created_at]
+   end
+
+   def account_core_utilization(from, to)
+     params = { }
+     params['from'] = from.to_s if from
+     params['to'] = to.to_s if to
+     code, body, res = get("/v3/account/core_utilization", params)
+     if code != "200"
+       raise_error("Account core utilization failed", res)
+     end
+     js = checked_json(body, %w[from to interval history])
+     from = Time.parse(js['from']).utc
+     to = Time.parse(js['to']).utc
+     interval = js['interval'].to_i
+     history = js['history']
+     return [from, to, interval, history]
+   end
+
+ end
+ end
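
A short sketch of the Account calls above, again assuming TreasureData::API.new(apikey) as the entry point (an assumption, not part of this diff); the Time arguments are simply stringified by the client before being sent.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    account_id, plan, storage_size, guaranteed_cores, maximum_cores, created_at = api.show_account
    puts "account #{account_id} (plan #{plan}): #{storage_size} bytes, #{guaranteed_cores}/#{maximum_cores} cores"

    # both bounds are optional; nil skips the parameter
    from, to, interval, history = api.account_core_utilization(Time.now - 86400, nil)
    puts "#{history.size} samples at #{interval}s intervals between #{from} and #{to}"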
@@ -0,0 +1,154 @@
+ class TreasureData::API
+ module BulkImport
+
+   ####
+   ## Bulk import API
+   ##
+
+   # => nil
+   def create_bulk_import(name, db, table, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/create/#{e name}/#{e db}/#{e table}", params)
+     if code != "200"
+       raise_error("Create bulk import failed", res)
+     end
+     return nil
+   end
+
+   # => nil
+   def delete_bulk_import(name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/delete/#{e name}", params)
+     if code != "200"
+       raise_error("Delete bulk import failed", res)
+     end
+     return nil
+   end
+
+   # => data:Hash
+   def show_bulk_import(name)
+     code, body, res = get("/v3/bulk_import/show/#{e name}")
+     if code != "200"
+       raise_error("Show bulk import failed", res)
+     end
+     js = checked_json(body, %w[status])
+     return js
+   end
+
+   # => result:[data:Hash]
+   def list_bulk_imports(opts={})
+     params = opts.dup
+     code, body, res = get("/v3/bulk_import/list", params)
+     if code != "200"
+       raise_error("List bulk imports failed", res)
+     end
+     js = checked_json(body, %w[bulk_imports])
+     return js['bulk_imports']
+   end
+
+   def list_bulk_import_parts(name, opts={})
+     params = opts.dup
+     code, body, res = get("/v3/bulk_import/list_parts/#{e name}", params)
+     if code != "200"
+       raise_error("List bulk import parts failed", res)
+     end
+     js = checked_json(body, %w[parts])
+     return js['parts']
+   end
+
+   # => nil
+   def bulk_import_upload_part(name, part_name, stream, size, opts={})
+     code, body, res = put("/v3/bulk_import/upload_part/#{e name}/#{e part_name}", stream, size)
+     if code[0] != ?2
+       raise_error("Upload a part failed", res)
+     end
+     return nil
+   end
+
+   # => nil
+   def bulk_import_delete_part(name, part_name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/delete_part/#{e name}/#{e part_name}", params)
+     if code[0] != ?2
+       raise_error("Delete a part failed", res)
+     end
+     return nil
+   end
+
+   # => nil
+   def freeze_bulk_import(name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/freeze/#{e name}", params)
+     if code != "200"
+       raise_error("Freeze bulk import failed", res)
+     end
+     return nil
+   end
+
+   # => nil
+   def unfreeze_bulk_import(name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/unfreeze/#{e name}", params)
+     if code != "200"
+       raise_error("Unfreeze bulk import failed", res)
+     end
+     return nil
+   end
+
+   # => jobId:String
+   def perform_bulk_import(name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/perform/#{e name}", params)
+     if code != "200"
+       raise_error("Perform bulk import failed", res)
+     end
+     js = checked_json(body, %w[job_id])
+     return js['job_id'].to_s
+   end
+
+   # => nil
+   def commit_bulk_import(name, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/bulk_import/commit/#{e name}", params)
+     if code != "200"
+       raise_error("Commit bulk import failed", res)
+     end
+     return nil
+   end
+
+   # => data...
+   def bulk_import_error_records(name, opts={}, &block)
+     params = opts.dup
+     code, body, res = get("/v3/bulk_import/error_records/#{e name}", params)
+     if code != "200"
+       raise_error("Failed to get bulk import error records", res)
+     end
+     if body.nil? || body.empty?
+       if block
+         return nil
+       else
+         return []
+       end
+     end
+     require File.expand_path('../compat_gzip_reader', File.dirname(__FILE__))
+     u = MessagePack::Unpacker.new(Zlib::GzipReader.new(StringIO.new(body)))
+     if block
+       begin
+         u.each(&block)
+       rescue EOFError
+       end
+       nil
+     else
+       result = []
+       begin
+         u.each {|row|
+           result << row
+         }
+       rescue EOFError
+       end
+       return result
+     end
+   end
+
+ end
+ end
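
The bulk import methods above are meant to be chained into a session lifecycle. A hedged sketch, assuming TreasureData::API.new(apikey) as the entry point and a pre-built msgpack.gz part file (the part format is an assumption, suggested by the error_records reader); all names are illustrative.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    api.create_bulk_import('session1', 'mydb', 'mytable')

    # upload_part takes an IO-like stream and its size in bytes
    File.open('part1.msgpack.gz', 'rb') {|f|
      api.bulk_import_upload_part('session1', 'part1', f, f.size)
    }

    api.freeze_bulk_import('session1')
    job_id = api.perform_bulk_import('session1')     # id of the server-side import job
    # ... wait for that job to finish (e.g. with job_status from the Job module), then:
    api.commit_bulk_import('session1')
    p api.bulk_import_error_records('session1')      # rows the server could not load, if any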
@@ -0,0 +1,47 @@
+ class TreasureData::API
+ module Database
+
+   ####
+   ## Database API
+   ##
+
+   # => {name:String => [count, created_at, updated_at, org, permission]}
+   def list_databases
+     code, body, res = get("/v3/database/list")
+     if code != "200"
+       raise_error("List databases failed", res)
+     end
+     js = checked_json(body, %w[databases])
+     result = {}
+     js["databases"].each {|m|
+       name = m['name']
+       count = m['count']
+       created_at = m['created_at']
+       updated_at = m['updated_at']
+       permission = m['permission']
+       result[name] = [count, created_at, updated_at, nil, permission] # set nil to org for API compatibility
+     }
+     return result
+   end
+
+   # => true
+   def delete_database(db)
+     code, body, res = post("/v3/database/delete/#{e db}")
+     if code != "200"
+       raise_error("Delete database failed", res)
+     end
+     return true
+   end
+
+   # => true
+   def create_database(db, opts={})
+     params = opts.dup
+     code, body, res = post("/v3/database/create/#{e db}", params)
+     if code != "200"
+       raise_error("Create database failed", res)
+     end
+     return true
+   end
+
+ end
+ end
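
For reference, a sketch of the Database calls, again assuming TreasureData::API.new(apikey) (not part of this diff); the database name is illustrative.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    api.create_database('staging')
    api.list_databases.each {|name, (count, created_at, updated_at, org, permission)|
      puts "#{name}: #{count} records, permission=#{permission}"
    }
    api.delete_database('staging')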
@@ -0,0 +1,21 @@
+ class TreasureData::API
+ module Export
+
+   ####
+   ## Export API
+   ##
+
+   # => jobId:String
+   def export(db, table, storage_type, opts={})
+     params = opts.dup
+     params['storage_type'] = storage_type
+     code, body, res = post("/v3/export/run/#{e db}/#{e table}", params)
+     if code != "200"
+       raise_error("Export failed", res)
+     end
+     js = checked_json(body, %w[job_id])
+     return js['job_id'].to_s
+   end
+
+ end
+ end
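
A sketch of the export call, assuming TreasureData::API.new(apikey); the 's3' storage type and the option keys shown are illustrative values that the client passes through to the API unchanged, not parameters defined in this diff.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    # opts are merged into the request as-is; keys here are illustrative
    job_id = api.export('mydb', 'www_access', 's3', 'bucket' => 'my-bucket', 'path' => '/exports/')
    puts "export running as job #{job_id}"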
@@ -0,0 +1,31 @@
+ class TreasureData::API
+ module Import
+
+   ####
+   ## Import API
+   ##
+
+   # => time:Float
+   def import(db, table, format, stream, size, unique_id=nil)
+     if unique_id
+       path = "/v3/table/import_with_id/#{e db}/#{e table}/#{unique_id}/#{format}"
+     else
+       path = "/v3/table/import/#{e db}/#{e table}/#{format}"
+     end
+     opts = {}
+     if @host == DEFAULT_ENDPOINT
+       opts[:host] = DEFAULT_IMPORT_ENDPOINT
+     elsif @host == NEW_DEFAULT_ENDPOINT
+       opts[:host] = NEW_DEFAULT_IMPORT_ENDPOINT
+     end
+     code, body, res = put(path, stream, size, opts)
+     if code[0] != ?2
+       raise_error("Import failed", res)
+     end
+     js = checked_json(body, %w[])
+     time = js['elapsed_time'].to_f
+     return time
+   end
+
+ end
+ end
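
A sketch of the streaming import call, assuming TreasureData::API.new(apikey); the 'msgpack.gz' format string and the unique_id value are illustrative.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    # unique_id (any stable string) lets the server deduplicate retried uploads
    File.open('records.msgpack.gz', 'rb') {|f|
      elapsed = api.import('mydb', 'www_access', 'msgpack.gz', f, f.size, 'batch-20130101-0001')
      puts "server-side import took #{elapsed}s"
    }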
@@ -0,0 +1,251 @@
+ class TreasureData::API
+ module Job
+
+   ####
+   ## Job API
+   ##
+
+   # => [(jobId:String, type:Symbol, status:String, start_at:String, end_at:String, result_url:String)]
+   def list_jobs(from=0, to=nil, status=nil, conditions=nil)
+     params = {}
+     params['from'] = from.to_s if from
+     params['to'] = to.to_s if to
+     params['status'] = status.to_s if status
+     params.merge!(conditions) if conditions
+     code, body, res = get("/v3/job/list", params)
+     if code != "200"
+       raise_error("List jobs failed", res)
+     end
+     js = checked_json(body, %w[jobs])
+     result = []
+     js['jobs'].each {|m|
+       job_id = m['job_id']
+       type = (m['type'] || '?').to_sym
+       database = m['database']
+       status = m['status']
+       query = m['query']
+       start_at = m['start_at']
+       end_at = m['end_at']
+       cpu_time = m['cpu_time']
+       result_size = m['result_size'] # compressed result size in msgpack.gz format
+       result_url = m['result']
+       priority = m['priority']
+       retry_limit = m['retry_limit']
+       result << [job_id, type, status, query, start_at, end_at, cpu_time,
+                  result_size, result_url, priority, retry_limit, nil, database]
+     }
+     return result
+   end
+
+   # => (type:Symbol, query:String, status:String, url:String, result:String)
+   def show_job(job_id)
+     # use v3/job/status instead of v3/job/show to poll for completion of a job
+     code, body, res = get("/v3/job/show/#{e job_id}")
+     if code != "200"
+       raise_error("Show job failed", res)
+     end
+     js = checked_json(body, %w[status])
+     # TODO debug
+     type = (js['type'] || '?').to_sym # TODO
+     database = js['database']
+     query = js['query']
+     status = js['status']
+     debug = js['debug']
+     url = js['url']
+     start_at = js['start_at']
+     end_at = js['end_at']
+     cpu_time = js['cpu_time']
+     result_size = js['result_size'] # compressed result size in msgpack.gz format
+     result = js['result'] # result target URL
+     hive_result_schema = (js['hive_result_schema'] || '')
+     if hive_result_schema.empty?
+       hive_result_schema = nil
+     else
+       begin
+         hive_result_schema = JSON.parse(hive_result_schema)
+       rescue JSON::ParserError => e
+         # this is a workaround for a Known Limitation in the Pig Engine which does not set a default, auto-generated
+         # column name for anonymous columns (such as the ones that are generated from UDFs like COUNT or SUM).
+         # The schema will contain 'nil' for the name of those columns and that breaks the JSON parser since it violates
+         # the JSON syntax standard.
+         if type == :pig and hive_result_schema !~ /[\{\}]/
+           begin
+             # NOTE: this works because a JSON two-dimensional array is also a valid Ruby array literal.
+             # Any change in the format of the hive_result_schema output may cause a syntax error, in which case
+             # this lame attempt at fixing the problem will fail and we will be raising the original JSON exception
+             hive_result_schema = eval(hive_result_schema)
+           rescue SyntaxError => ignored_e
+             raise e
+           end
+           hive_result_schema.each_with_index {|col_schema, idx|
+             if col_schema[0].nil?
+               col_schema[0] = "_col#{idx}"
+             end
+           }
+         else
+           raise e
+         end
+       end
+     end
+     priority = js['priority']
+     retry_limit = js['retry_limit']
+     return [type, query, status, url, debug, start_at, end_at, cpu_time,
+             result_size, result, hive_result_schema, priority, retry_limit, nil, database]
+   end
+
+   def job_status(job_id)
+     code, body, res = get("/v3/job/status/#{e job_id}")
+     if code != "200"
+       raise_error("Get job status failed", res)
+     end
+
+     js = checked_json(body, %w[status])
+     return js['status']
+   end
+
+   def job_result(job_id)
+     code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'})
+     if code != "200"
+       raise_error("Get job result failed", res)
+     end
+     result = []
+     MessagePack::Unpacker.new.feed_each(body) {|row|
+       result << row
+     }
+     return result
+   end
+
+   # block is optional and must accept 1 parameter
+   def job_result_format(job_id, format, io=nil, &block)
+     if io
+       code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format}) {|res|
+         if res.code != "200"
+           raise_error("Get job result failed", res)
+         end
+
+         if ce = res.header['Content-Encoding']
+           res.extend(DeflateReadBodyMixin)
+           res.gzip = true if ce == 'gzip'
+         else
+           res.extend(DirectReadBodyMixin)
+         end
+
+         res.extend(DirectReadBodyMixin)
+         if ce = res.header['Content-Encoding']
+           if ce == 'gzip'
+             infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
+           else
+             infl = Zlib::Inflate.new
+           end
+         end
+
+         total_compr_size = 0
+         res.each_fragment {|fragment|
+           total_compr_size += fragment.size
+           # decompress the fragment if the 'Content-Encoding' header is set in the response
+           fragment = infl.inflate(fragment) if ce
+           io.write(fragment)
+           block.call(total_compr_size) if block_given?
+         }
+       }
+       nil
+     else
+       code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format})
+       if res.code != "200"
+         raise_error("Get job result failed", res)
+       end
+       body
+     end
+   end
+
+   # block is optional and must accept 1 argument
+   def job_result_each(job_id, &block)
+     get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res|
+       if res.code != "200"
+         raise_error("Get job result failed", res)
+       end
+
+       # default to decompressing the response since format is fixed to 'msgpack'
+       res.extend(DeflateReadBodyMixin)
+       res.gzip = (res.header['Content-Encoding'] == 'gzip')
+       upkr = MessagePack::Unpacker.new
+       res.each_fragment {|inflated_fragment|
+         upkr.feed_each(inflated_fragment, &block)
+       }
+     }
+     nil
+   end
+
+   # block is optional and must accept 1 argument
+   def job_result_each_with_compr_size(job_id, &block)
+     get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res|
+       if res.code != "200"
+         raise_error("Get job result failed", res)
+       end
+
+       res.extend(DirectReadBodyMixin)
+       if res.header['Content-Encoding'] == 'gzip'
+         infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
+       else
+         infl = Zlib::Inflate.new
+       end
+       upkr = MessagePack::Unpacker.new
+       begin
+         total_compr_size = 0
+         res.each_fragment {|fragment|
+           total_compr_size += fragment.size
+           upkr.feed_each(infl.inflate(fragment)) {|unpacked|
+             block.call(unpacked, total_compr_size) if block_given?
+           }
+         }
+       ensure
+         infl.close
+       end
+     }
+     nil
+   end
+
+   def job_result_raw(job_id, format)
+     code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format})
+     if code != "200"
+       raise_error("Get job result failed", res)
+     end
+     return body
+   end
+
+   def kill(job_id)
+     code, body, res = post("/v3/job/kill/#{e job_id}")
+     if code != "200"
+       raise_error("Kill job failed", res)
+     end
+     js = checked_json(body, %w[])
+     former_status = js['former_status']
+     return former_status
+   end
+
+   # => jobId:String
+   def hive_query(q, db=nil, result_url=nil, priority=nil, retry_limit=nil, opts={})
+     query(q, :hive, db, result_url, priority, retry_limit, opts)
+   end
+
+   # => jobId:String
+   def pig_query(q, db=nil, result_url=nil, priority=nil, retry_limit=nil, opts={})
+     query(q, :pig, db, result_url, priority, retry_limit, opts)
+   end
+
+   # => jobId:String
+   def query(q, type=:hive, db=nil, result_url=nil, priority=nil, retry_limit=nil, opts={})
+     params = {'query' => q}.merge(opts)
+     params['result'] = result_url if result_url
+     params['priority'] = priority if priority
+     params['retry_limit'] = retry_limit if retry_limit
+     code, body, res = post("/v3/job/issue/#{type}/#{e db}", params)
+     if code != "200"
+       raise_error("Query failed", res)
+     end
+     js = checked_json(body, %w[job_id])
+     return js['job_id'].to_s
+   end
+
+ end
+ end
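
Finally, a sketch tying the Job calls together, assuming TreasureData::API.new(apikey); the set of terminal job statuses used in the polling loop is an assumption, not defined in this diff.

    api = TreasureData::API.new(ENV['TD_API_KEY'])   # assumed constructor

    job_id = api.query('SELECT COUNT(1) FROM www_access', :hive, 'mydb')
    sleep 5 until %w[success error killed].include?(api.job_status(job_id))   # assumed terminal states

    api.job_result(job_id).each {|row| p row }       # rows decoded from the msgpack response

    # or stream a formatted result straight to a file
    File.open('result.csv', 'wb') {|io|
      api.job_result_format(job_id, 'csv', io)
    }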