td-client 0.8.62 → 0.8.63
- checksums.yaml +8 -8
- data/lib/td/client.rb +23 -16
- data/lib/td/client/api.rb +97 -31
- data/lib/td/client/model.rb +59 -15
- data/lib/td/client/version.rb +1 -1
- data/spec/spec_helper.rb +1 -0
- data/spec/td/client/api_spec.rb +1 -5
- data/spec/td/client/bulk_import_spec.rb +5 -5
- data/spec/td/client/db_api_spec.rb +91 -6
- data/spec/td/client/export_api_spec.rb +1 -1
- data/spec/td/client/job_api_spec.rb +38 -5
- data/spec/td/client/partial_delete_api_spec.rb +2 -2
- data/spec/td/client/result_api_spec.rb +3 -3
- data/spec/td/client/sched_api_spec.rb +36 -3
- data/spec/td/client/spec_resources.rb +21 -15
- data/spec/td/client/table_api_spec.rb +165 -0
- metadata +4 -4
- data/spec/api_spec.rb +0 -72
checksums.yaml
CHANGED
@@ -1,15 +1,15 @@
---
!binary "U0hBMQ==":
  metadata.gz: !binary |-
-
+    MDBjNmRmZmY5MjZkMGIxY2ZmZjEzMzU3NjhlNDMxYTBhMDc0YzUxMw==
  data.tar.gz: !binary |-
-
+    MjE5ZTgwOTI1OGM4NjEyODdjMDNlYmM5NmQ4NGU5ZTVlYmNkMDMwYg==
SHA512:
  metadata.gz: !binary |-
-
-
-
+    ZDZmNjY2OTkyODExOWUwYmMyNDE0ZGEwMmU5MzU4OTkzMmYzZTMwMzY3MzEw
+    ZjIxZjMyYTkxZDlmYWFjODEwODA3ZmU0MDc0NjE1NzQzMDkxMWViYTQxZWE5
+    YzRlYzRlOGYwYmYzZTlmNDgzYzQ4YmUzYzAxZDVjYjY4N2VjYzk=
  data.tar.gz: !binary |-
-
-
-
+    Zjk1ZDViZGZkOGI2YmE4ZGM2N2ZjMzFkNmNhMDNmMzFkZDY1YzAwYTU1ZmU5
+    MjUwYjRlNjhkYmM5ZmQwZWNmYjM4YmExZDllNzhlZWMwNjZjMjhiZTk5YjVl
+    NzA1NTkyM2FkZjk3ZGQ4ZjRmOWYzNmFmZTMwMTgwM2NlN2YzNGQ=
data/lib/td/client.rb
CHANGED
@@ -1,4 +1,3 @@
-
module TreasureData

require 'td/client/api'
@@ -55,17 +54,17 @@ class Client
  # => [Database]
  def databases
    m = @api.list_databases
-    m.map {|db_name,(count,created_at,updated_at,org)|
-      Database.new(self, db_name, nil, count, created_at, updated_at, org)
+    m.map {|db_name,(count, created_at, updated_at, org, permission)|
+      Database.new(self, db_name, nil, count, created_at, updated_at, org, permission)
    }
  end

  # => Database
  def database(db_name)
    m = @api.list_databases
-    m.each {|name,(count,created_at,updated_at,org)|
+    m.each {|name,(count, created_at, updated_at, org, permission)|
      if name == db_name
-        return Database.new(self, name, nil, count, created_at, updated_at, org)
+        return Database.new(self, name, nil, count, created_at, updated_at, org, permission)
      end
    }
    raise NotFoundError, "Database '#{db_name}' does not exist"
@@ -129,24 +128,28 @@ class Client
  def query(db_name, q, result_url=nil, priority=nil, retry_limit=nil, opts={})
    # for compatibility, assume type is hive unless specifically specified
    type = opts[:type] || opts['type'] || :hive
-    raise ArgumentError, "The specified query type is not supported: #{type}" unless [:hive, :pig, :impala, :presto].include?
+    raise ArgumentError, "The specified query type is not supported: #{type}" unless [:hive, :pig, :impala, :presto].include?(type)
    job_id = @api.query(q, type, db_name, result_url, priority, retry_limit, opts)
    Job.new(self, job_id, type, q)
  end

  # => [Job]
  def jobs(from=nil, to=nil, status=nil, conditions=nil)
-
-
-
+    results = @api.list_jobs(from, to, status, conditions)
+    results.map {|job_id, type, status, query, start_at, end_at, cpu_time,
+                 result_size, result_url, priority, retry_limit, org, db|
+      Job.new(self, job_id, type, query, status, nil, nil, start_at, end_at, cpu_time,
+              result_size, nil, result_url, nil, priority, retry_limit, org, db)
    }
  end

  # => Job
  def job(job_id)
    job_id = job_id.to_s
-    type, query, status, url, debug, start_at, end_at, cpu_time,
-
+    type, query, status, url, debug, start_at, end_at, cpu_time,
+      result_size, result_url, hive_result_schema, priority, retry_limit, org, db = @api.show_job(job_id)
+    Job.new(self, job_id, type, query, status, url, debug, start_at, end_at, cpu_time,
+            result_size, nil, result_url, hive_result_schema, priority, retry_limit, org, db)
  end

  # => status:String
@@ -160,8 +163,8 @@ class Client
  end

  # => result:String
-  def job_result_format(job_id, format, io=nil)
-    @api.job_result_format(job_id, format, io)
+  def job_result_format(job_id, format, io=nil, &block)
+    @api.job_result_format(job_id, format, io, &block)
  end

  # => nil
@@ -169,6 +172,11 @@ class Client
    @api.job_result_each(job_id, &block)
  end

+  # => nil
+  def job_result_each_with_compr_size(job_id, &block)
+    @api.job_result_each_with_compr_size(job_id, &block)
+  end
+
  # => former_status:String
  def kill(job_id)
    @api.kill(job_id)
@@ -351,7 +359,7 @@ class Client

  # => true
  def remove_apikey(user, apikey)
-    @api.remove_apikey(user,
+    @api.remove_apikey(user, g)
  end

  # => true
@@ -388,5 +396,4 @@ class Client
  end
end

-
-end
+end # module TreasureData
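Taken together, the client.rb changes thread the new result_size and database permission fields through Client and add an optional progress block to the result download paths. A rough usage sketch, not part of the gem itself (the API key, job id, and output file below are placeholders):

    require 'td/client'

    client = TreasureData::Client.new(ENV['TD_API_KEY'])   # placeholder API key

    # Database objects now carry the permission reported by the v3 API.
    client.databases.each {|db|
      puts "#{db.name}: #{db.permission}"                  # e.g. :full_access
    }

    # The new job_result_each_with_compr_size also yields the number of
    # compressed bytes read so far, handy for progress reporting.
    client.job_result_each_with_compr_size('12345') {|row, compr_size|
      # row is one decoded result record, compr_size the cumulative gzip bytes
    }

    # job_result_format now forwards an optional block for the same purpose.
    File.open('result.json', 'w') {|io|
      client.job_result_format('12345', 'json', io) {|compr_size|
        $stderr.print "\r#{compr_size} bytes (compressed) downloaded"
      }
    }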
data/lib/td/client/api.rb
CHANGED
@@ -2,19 +2,26 @@ require 'td/client/version'

module TreasureData

+class ParameterValidationError < StandardError
+end

+# Generic API error
class APIError < StandardError
end

-
+# 401 API errors
+class AuthError < APIError
end

-
+# 403 API errors, used for database permissions
+class ForbiddenError < APIError
end

+# 409 API errors
class AlreadyExistsError < APIError
end

+# 404 API errors
class NotFoundError < APIError
end

@@ -154,8 +161,8 @@ class API
    if name.length < 3
      name += "_" * (3 - name.length)
    end
-    if
-    name = name[0,
+    if 255 < name.length
+      name = name[0, 253] + "__"
    end
    name = name.downcase
    name = name.gsub(/[^a-z0-9_]/, '_')
@@ -248,7 +255,8 @@ class API
      count = m['count']
      created_at = m['created_at']
      updated_at = m['updated_at']
-
+      permission = m['permission']
+      result[name] = [count, created_at, updated_at, nil, permission] # set nil to org for API compatibiilty
    }
    return result
  end
@@ -419,10 +427,12 @@ class API
      start_at = m['start_at']
      end_at = m['end_at']
      cpu_time = m['cpu_time']
+      result_size = m['result_size'] # compressed result size in msgpack.gz format
      result_url = m['result']
      priority = m['priority']
      retry_limit = m['retry_limit']
-      result << [job_id, type, status, query, start_at, end_at, cpu_time,
+      result << [job_id, type, status, query, start_at, end_at, cpu_time,
+                 result_size, result_url, priority, retry_limit, nil, database]
    }
    return result
  end
@@ -445,7 +455,8 @@ class API
    start_at = js['start_at']
    end_at = js['end_at']
    cpu_time = js['cpu_time']
-
+    result_size = js['result_size'] # compressed result size in msgpack.gz format
+    result = js['result'] # result target URL
    hive_result_schema = (js['hive_result_schema'] || '')
    if hive_result_schema.empty?
      hive_result_schema = nil
@@ -478,8 +489,8 @@ class API
    end
    priority = js['priority']
    retry_limit = js['retry_limit']
-    return [type, query, status, url, debug, start_at, end_at, cpu_time,
-            hive_result_schema, priority, retry_limit, nil, database]
+    return [type, query, status, url, debug, start_at, end_at, cpu_time,
+            result_size, result, hive_result_schema, priority, retry_limit, nil, database]
  end

  def job_status(job_id)
@@ -505,14 +516,38 @@ class API
    return result
  end

-
+  # block is optional and must accept 1 parameter
+  def job_result_format(job_id, format, io=nil, &block)
    if io
      code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format}) {|res|
        if res.code != "200"
          raise_error("Get job result failed", res)
        end
+
+        if ce = res.header['Content-Encoding']
+          require 'zlib'
+          res.extend(DeflateReadBodyMixin)
+          res.gzip = true if ce == 'gzip'
+        else
+          res.extend(DirectReadBodyMixin)
+        end
+
+        res.extend(DirectReadBodyMixin)
+        if ce = res.header['Content-Encoding']
+          if ce == 'gzip'
+            infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
+          else
+            infl = Zlib::Inflate.new
+          end
+        end
+
+        total_compr_size = 0
        res.each_fragment {|fragment|
+          total_compr_size += fragment.size
+          # uncompressed if the 'Content-Enconding' header is set in response
+          fragment = infl.inflate(fragment) if ce
          io.write(fragment)
+          block.call(total_compr_size) if block_given?
        }
      }
      nil
@@ -525,20 +560,57 @@ class API
    end
  end

+  # block is optional and must accept 1 argument
  def job_result_each(job_id, &block)
    require 'msgpack'
    get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res|
      if res.code != "200"
        raise_error("Get job result failed", res)
      end
-
-
-
+
+      # default to decompressing the response since format is fixed to 'msgpack'
+      res.extend(DeflateReadBodyMixin)
+      res.gzip = (res.header['Content-Encoding'] == 'gzip')
+      upkr = MessagePack::Unpacker.new
+      res.each_fragment {|inflated_fragment|
+        upkr.feed_each(inflated_fragment, &block)
      }
    }
    nil
  end

+  # block is optional and must accept 1 argument
+  def job_result_each_with_compr_size(job_id, &block)
+    require 'zlib'
+    require 'msgpack'
+
+    get("/v3/job/result/#{e job_id}", {'format'=>'msgpack'}) {|res|
+      if res.code != "200"
+        raise_error("Get job result failed", res)
+      end
+
+      res.extend(DirectReadBodyMixin)
+      if res.header['Content-Encoding'] == 'gzip'
+        infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
+      else
+        infl = Zlib::Inflate.new
+      end
+      upkr = MessagePack::Unpacker.new
+      begin
+        total_compr_size = 0
+        res.each_fragment {|fragment|
+          total_compr_size += fragment.size
+          upkr.feed_each(infl.inflate(fragment)) {|unpacked|
+            block.call(unpacked, total_compr_size) if block_given?
+          }
+        }
+      ensure
+        infl.close
+      end
+    }
+    nil
+  end
+
  def job_result_raw(job_id, format)
    code, body, res = get("/v3/job/result/#{e job_id}", {'format'=>format})
    if code != "200"
@@ -816,7 +888,7 @@ class API
  end

  def update_schedule(name, params)
-    code, body, res =
+    code, body, res = post("/v3/schedule/update/#{e name}", params)
    if code != "200"
      raise_error("Update schedule failed", res)
    end
@@ -1116,21 +1188,20 @@ class API
    return status
  end

-
  private
  module DeflateReadBodyMixin
    attr_accessor :gzip

    def each_fragment(&block)
      if @gzip
-        infl = Zlib::Inflate.new(Zlib::MAX_WBITS+16)
+        infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
      else
        infl = Zlib::Inflate.new
      end
      begin
-        read_body
+        read_body {|fragment|
          block.call infl.inflate(fragment)
-
+        }
      ensure
        infl.close
      end
@@ -1165,16 +1236,9 @@ class API
    end

    if block
-      response = http.request(request)
-      if ce = res.header['Content-Encoding']
-        require 'zlib'
-        res.extend(DeflateReadBodyMixin)
-        res.gzip = true if ce == 'gzip'
-      else
-        res.extend(DirectReadBodyMixin)
-      end
+      response = http.request(request) {|res|
        block.call(res)
-
+      }
    else
      response = http.request(request)
    end
@@ -1190,7 +1254,7 @@ class API
    if ce = response.header['content-encoding']
      require 'zlib'
      if ce == 'gzip'
-        infl = Zlib::Inflate.new(Zlib::MAX_WBITS+16)
+        infl = Zlib::Inflate.new(Zlib::MAX_WBITS + 16)
        begin
          body = infl.inflate(body)
        ensure
@@ -1330,6 +1394,8 @@ class API
      raise AlreadyExistsError, "#{msg}: #{error_msg}"
    elsif status_code == "401"
      raise AuthError, "#{msg}: #{error_msg}"
+    elsif status_code == "403"
+      raise ForbiddenError, "#{msg}: #{error_msg}"
    else
      raise APIError, "#{status_code}: #{msg}: #{error_msg}"
    end
@@ -1343,6 +1409,8 @@ class API
      raise AlreadyExistsError, "#{msg}: #{res.body}"
    elsif status_code == "401"
      raise AuthError, "#{msg}: #{res.body}"
+    elsif status_code == "403"
+      raise ForbiddenError, "#{msg}: #{res.body}"
    else
      raise APIError, "#{status_code}: #{msg}: #{res.body}"
    end
@@ -1374,6 +1442,4 @@ class API
  end
end

-
-end
-
+end # module TreasureData
data/lib/td/client/model.rb
CHANGED
@@ -28,29 +28,33 @@ class Account < Model
  end

  def storage_size_string
-    if @storage_size <= 1024*1024
+    if @storage_size <= 1024 * 1024
      return "0.0 GB"
-    elsif @storage_size <= 60*1024*1024
+    elsif @storage_size <= 60 * 1024 * 1024
      return "0.01 GB"
-    elsif @storage_size <= 60*1024*1024*1024
-      "%.1f GB" % (@storage_size.to_f / (1024*1024*1024))
+    elsif @storage_size <= 60 * 1024 * 1024 * 1024
+      "%.1f GB" % (@storage_size.to_f / (1024 * 1024 * 1024))
    else
-      "%d GB" % (@storage_size.to_f / (1024*1024*1024)).to_i
+      "%d GB" % (@storage_size.to_f / (1024 * 1024 * 1024)).to_i
    end
  end
end

class Database < Model
-
+  PERMISSIONS = [:administrator, :full_access, :import_only, :query_only]
+  PERMISSION_LIST_TABLES = [:administrator, :full_access]
+
+  def initialize(client, db_name, tables=nil, count=nil, created_at=nil, updated_at=nil, org_name=nil, permission=nil)
    super(client)
    @db_name = db_name
    @tables = tables
    @count = count
    @created_at = created_at
    @updated_at = updated_at
+    @permission = permission.to_sym
  end

-  attr_reader :org_name
+  attr_reader :org_name, :permission, :count

  def name
    @db_name
@@ -81,8 +85,6 @@ class Database < Model
    @client.query(@db_name, q)
  end

-  attr_reader :count
-
  def created_at
    @created_at && !@created_at.empty? ? Time.parse(@created_at) : nil
  end
@@ -93,12 +95,19 @@ class Database < Model

  def update_tables!
    @tables = @client.tables(@db_name)
+    # provide Table objects with a reference to the parent Database to avoid
+    # requesting the Database information (such as permission) every time
+    @tables.each {|table|
+      table.database = self
+    }
  end
+
end

class Table < Model
  def initialize(client, db_name, table_name, type, schema, count, created_at=nil, updated_at=nil, estimated_storage_size=nil, last_import=nil, last_log_timestamp=nil, expire_days=nil, primary_key=nil, primary_key_type=nil)
    super(client)
+    @database = nil
    @db_name = db_name
    @table_name = table_name
    @type = type
@@ -119,6 +128,10 @@ class Table < Model
  alias database_name db_name
  alias name table_name

+  def database=(database)
+    @database = database if database.instance_of?(Database)
+  end
+
  def created_at
    @created_at && !@created_at.empty? ? Time.parse(@created_at) : nil
  end
@@ -140,7 +153,13 @@ class Table < Model
  end

  def database
-    @
+    update_database! unless @database
+    @database
+  end
+
+  # get the database's permission as if they were the table's
+  def permission
+    database.permission
  end

  def identifier
@@ -174,6 +193,10 @@ class Table < Model
      "%d GB" % (@estimated_storage_size.to_f / (1024*1024*1024)).to_i
    end
  end
+
+  def update_database!
+    @database = @client.database(@db_name)
+  end
end

class Schema
@@ -237,7 +260,9 @@ class Job < Model
  STATUS_KILLED = "killed"
  FINISHED_STATUS = [STATUS_SUCCESS, STATUS_ERROR, STATUS_KILLED]

-  def initialize(client, job_id, type, query, status=nil, url=nil, debug=nil, start_at=nil, end_at=nil, cpu_time=nil,
+  def initialize(client, job_id, type, query, status=nil, url=nil, debug=nil, start_at=nil, end_at=nil, cpu_time=nil,
+                 result_size=nil, result=nil, result_url=nil, hive_result_schema=nil, priority=nil, retry_limit=nil,
+                 org_name=nil, db_name=nil)
    super(client)
    @job_id = job_id
    @type = type
@@ -248,6 +273,7 @@ class Job < Model
    @start_at = start_at
    @end_at = end_at
    @cpu_time = cpu_time
+    @result_size = result_size
    @result = result
    @result_url = result_url
    @hive_result_schema = hive_result_schema
@@ -307,6 +333,11 @@ class Job < Model
    @hive_result_schema
  end

+  def result_size
+    update_status! unless @result_size || finished?
+    @result_size
+  end
+
  def result
    unless @result
      return nil unless finished?
@@ -315,9 +346,18 @@ class Job < Model
    @result
  end

-  def result_format(format, io=nil)
+  def result_format(format, io=nil, &block)
    return nil unless finished?
-    @client.job_result_format(@job_id, format, io)
+    @client.job_result_format(@job_id, format, io, &block)
+  end
+
+  def result_each_with_compr_size(&block)
+    if @result
+      @result.each(&block)
+    else
+      @client.job_result_each_with_compr_size(@job_id, &block)
+    end
+    nil
  end

  def result_each(&block)
@@ -364,7 +404,9 @@ class Job < Model
  end

  def update_status!
-    type, query, status, url, debug, start_at, end_at, cpu_time,
+    type, query, status, url, debug, start_at, end_at, cpu_time,
+      result_size, result_url, hive_result_schema, priority, retry_limit,
+      org_name, db_name = @client.api.show_job(@job_id)
    @query = query
    @status = status
    @url = url
@@ -372,6 +414,7 @@ class Job < Model
    @start_at = start_at
    @end_at = end_at
    @cpu_time = cpu_time
+    @result_size = result_size
    @result_url = result_url
    @hive_result_schema = hive_result_schema
    @priority = priority
@@ -395,7 +438,8 @@ end


class Schedule < Model
-  def initialize(client, name, cron, query, database=nil, result_url=nil, timezone=nil, delay=nil, next_time=nil,
+  def initialize(client, name, cron, query, database=nil, result_url=nil, timezone=nil, delay=nil, next_time=nil,
+                 priority=nil, retry_limit=nil, org_name=nil)
    super(client)
    @name = name
    @cron = cron