mobilize-hive 1.291 → 1.292
- checksums.yaml +4 -4
- data/lib/mobilize-hive/handlers/hive.rb +19 -12
- data/lib/mobilize-hive/version.rb +1 -1
- data/mobilize-hive.gemspec +1 -1
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c6d52c5de86351ccf5ab3c4da09b7341f27e5163
+  data.tar.gz: 3e560d834babc1377d8e3c6c80b799e9c4655acf
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d910e35cefda69f9640105949454b04b6d08df9069e2e9fdb61b9875806c921fb187701977afee7efcc3a5b84dadbbd91abf4d6bf9b4817bcf4f3c1945effbfb
+  data.tar.gz: 9a41441ce0ddf76b81fe63350a2ece3d1cc5ab0daccb1bc09f6db394dcb618078c4d01c1893d3a16090656dfdd36ec5fa8332aa5a699d096abcd85b726a1efa2
data/lib/mobilize-hive/handlers/hive.rb
CHANGED
@@ -262,7 +262,8 @@ module Mobilize
 temp_set_hql = "set mapred.job.name=#{job_name} (temp table);"
 temp_drop_hql = "drop table if exists #{temp_table_path};"
 temp_create_hql = "#{temp_set_hql}#{prior_hql}#{temp_drop_hql}create table #{temp_table_path} as #{last_select_hql}"
-Hive.run(cluster,temp_create_hql,user_name)
+response = Hive.run(cluster,temp_create_hql,user_name)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 
 source_table_stats = Hive.table_stats(cluster,temp_db,temp_table_name,user_name)
 source_fields = source_table_stats['field_defs']
@@ -300,7 +301,9 @@ module Mobilize
 target_insert_hql,
 temp_drop_hql].join
 
-Hive.run(cluster, target_full_hql, user_name)
+response = Hive.run(cluster, target_full_hql, user_name)
+
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 
 elsif part_array.length > 0 and
 table_stats.ie{|tts| tts.nil? || drop || tts['partitions'].to_a.map{|p| p['name']} == part_array}
@@ -350,7 +353,9 @@ module Mobilize
 part_set_hql = "set hive.cli.print.header=true;set mapred.job.name=#{job_name} (permutations);"
 part_select_hql = "select distinct #{target_part_stmt} from #{temp_table_path};"
 part_perm_hql = part_set_hql + part_select_hql
-
+response = Hive.run(cluster, part_perm_hql, user_name)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
+part_perm_tsv = response['stdout']
 #having gotten the permutations, ensure they are dropped
 part_hash_array = part_perm_tsv.tsv_to_hash_array
 part_drop_hql = part_hash_array.map do |h|
@@ -368,7 +373,8 @@ module Mobilize
 
 target_full_hql = [target_set_hql, target_create_hql, target_insert_hql, temp_drop_hql].join
 
-Hive.run(cluster, target_full_hql, user_name)
+response = Hive.run(cluster, target_full_hql, user_name)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 else
 error_msg = "Incompatible partition specs"
 raise error_msg
@@ -414,7 +420,8 @@ module Mobilize
 
 target_full_hql = [target_drop_hql,target_create_hql,target_insert_hql].join(";")
 
-Hive.run(cluster, target_full_hql, user_name, file_hash)
+response = Hive.run(cluster, target_full_hql, user_name, file_hash)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 
 elsif part_array.length > 0 and
 table_stats.ie{|tts| tts.nil? || drop || tts['partitions'].to_a.map{|p| p['name']} == part_array}
@@ -441,7 +448,8 @@ module Mobilize
 "partitioned by #{partition_defs}"
 
 #create target table early if not here
-Hive.run(cluster, target_create_hql, user_name)
+response = Hive.run(cluster, target_create_hql, user_name)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 
 table_stats = Hive.table_stats(cluster, db, table, user_name)
 
@@ -480,7 +488,8 @@ module Mobilize
 #run actual partition adds all at once
 if target_part_hql.length>0
 puts "Adding partitions to #{cluster}/#{db}/#{table} for #{user_name} at #{Time.now.utc}"
-Hive.run(cluster, target_part_hql, user_name)
+response = Hive.run(cluster, target_part_hql, user_name)
+raise response['stderr'] if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
 end
 else
 error_msg = "Incompatible partition specs: " +
@@ -488,6 +497,7 @@ module Mobilize
 "user_params:#{part_array.to_s}"
 raise error_msg
 end
+
 url = "hive://" + [cluster,db,table,part_array.compact.join("/")].join("/")
 return url
 end
@@ -580,11 +590,8 @@ module Mobilize
 select_hql = "select * from #{source_path};"
 hql = [set_hql,select_hql].join
 response = Hive.run(cluster, hql,user_name)
-if response['
-
-else
-raise "Unable to read hive://#{dst_path} with error: #{response['stderr']}"
-end
+raise "Unable to read hive://#{dst_path} with error: #{response['stderr']}" if response['stderr'].to_s.ie{|s| s.index("FAILED") or s.index("KILLED")}
+return response['stdout']
 end
 
 def Hive.write_by_dataset_path(dst_path,source_tsv,user_name,*args)
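Every Hive.run call site in this file now follows the same pattern: capture the response hash and raise when the Hive CLI reports a failed or killed job. Below is a minimal sketch of that check. It assumes Hive.run returns a hash with 'stdout' and 'stderr' string values, and that the `ie` core extension from mobilize-base simply yields its receiver to the block; the `check_hive_response!` helper name is hypothetical, since the diff inlines the one-liner at each call site instead.

```ruby
# Sketch of the error check added after each Hive.run call in this diff.
# Assumption: mobilize-base's Object#ie just yields the receiver to the block.
class Object
  def ie
    yield self
  end
end

# Hypothetical helper; the gem inlines this check rather than extracting it.
def check_hive_response!(response)
  stderr = response['stderr'].to_s
  # Raise the raw stderr if the Hive CLI reported a FAILED or KILLED job.
  raise stderr if stderr.ie{|s| s.index("FAILED") or s.index("KILLED")}
  response
end

# Usage with stubbed response hashes (Hive.run is assumed to return this shape):
ok     = {'stdout' => "1\t2\n", 'stderr' => ""}
failed = {'stdout' => "",       'stderr' => "FAILED: Execution Error, return code 2"}

check_hive_response!(ok)       # passes, returns the response
begin
  check_hive_response!(failed)
rescue => e
  puts "raised: #{e.message}"  # => raised: FAILED: Execution Error, return code 2
end
```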
data/mobilize-hive.gemspec
CHANGED
@@ -16,5 +16,5 @@ Gem::Specification.new do |gem|
 gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
 gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
 gem.require_paths = ["lib"]
-gem.add_runtime_dependency "mobilize-hdfs","1.
+gem.add_runtime_dependency "mobilize-hdfs","1.292"
 end
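For context, a bare version string passed to add_runtime_dependency is treated by RubyGems as an exact pin, which is why the generated metadata below carries an '=' requirement. A standalone sketch using the stock RubyGems API:

```ruby
require 'rubygems'

# A bare version string defaults to an exact ('=') requirement.
dep = Gem::Dependency.new("mobilize-hdfs", "1.292", :runtime)
puts dep.requirement   # => = 1.292
puts dep.type          # => runtime
```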
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: mobilize-hive
 version: !ruby/object:Gem::Version
-  version: '1.291'
+  version: '1.292'
 platform: ruby
 authors:
 - Cassio Paes-Leme
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-03-
+date: 2013-03-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: mobilize-hdfs
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - '='
      - !ruby/object:Gem::Version
-       version: '1.
+       version: '1.292'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '='
     - !ruby/object:Gem::Version
-      version: '1.
+      version: '1.292'
 description: Adds hive read, write, and run support to mobilize-hdfs
 email:
 - cpaesleme@dena.com