spark_toolkit 0.1.1-java → 0.1.2-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 56d3aadfc0f28d592c7051ee0fe9427df5f73383
-  data.tar.gz: 0fa58599de96d34d0c1fcf613e288073867ce6b8
+  metadata.gz: 28e6117542c2bfe075aa07f26288d6b5d0393940
+  data.tar.gz: c46cbeb68f311feedf38da887e1e0793abac395a
 SHA512:
-  metadata.gz: 20f30636b201ede56541e852fce3828211e4ee3aad31f83155afd8b58168a143d6c8c8bac913101be49790aa4acc601c55f2457039c722da9d0f2e666db72b4b
-  data.tar.gz: 053cf2ee78cd8bf2707aaa0c255ce21cb0866bf56b43b84056a262b8d8b1b2f9f570f6869dbad6874f6997064388a48bc81e2d7932163b4dd3801077572901e4
+  metadata.gz: 2552ec0b597334361de3f60d450c769ebcf223426ad5e7a85f0b2f2cb7d63a73be3dc9f1873217d4d48f1bdf4a53e3265d7606eef10476987903613fee00cf13
+  data.tar.gz: c70b25a32b9c793ffca931e822ed9b726554d12191016551f3f10cc61dff963e645280c40fff2d5dbe66d5834a5d70cfa292f792de554c301d543d4bc7e943af
data/docs/YARN.md CHANGED
@@ -38,10 +38,10 @@ app_report_detail = app_report.get_detail
 ### Get Log of YARN Application
 
 ```ruby
-log = yarn.get_application_logs(app_id)
-formatted_log_stdout = SparkToolkit::YARN::SimpleFormatter.format(app_id, :stdout)
-formatted_log_stderr = SparkToolkit::YARN::SimpleFormatter.format(app_id, :stderr)
-formatted_log_all = SparkToolkit::YARN::SimpleFormatter.format(app_id, :all)
+log_stdout = yarn.get_application_logs(app_id, :stdout)
+log_stderr = yarn.get_application_logs(app_id, :stderr)
+log_all = yarn.get_application_logs(app_id, :all)
+formatted_log = SparkToolkit::YARN::SimpleLogFormatter.format(log_all)
 ```
 
 ### Get Report of YARN Attempt
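For orientation, the README's new flow separates fetching logs from formatting them. A minimal sketch of the updated API under the same assumptions as the README (a connected `yarn` client and a finished application's `app_id`); the output filename is illustrative:

```ruby
# Fetch raw logs for one stream, then render them; `yarn` and `app_id`
# are assumed to exist as in the README examples above.
log_all = yarn.get_application_logs(app_id, :all)
formatted_log = SparkToolkit::YARN::SimpleLogFormatter.format(log_all)

# Illustrative only: persist the formatted text for inspection.
File.write("app-#{app_id}.log", formatted_log)
```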
@@ -41,6 +41,16 @@ module SparkToolkit
       @hdfs.copy_to_local_file(false, Path.new(hdfs_src), Path.new(local_dst), true)
     end
 
+    def get_file_status(entry)
+      @hdfs.get_file_status(Path.new(entry))
+    end
+    alias_method :status, :get_file_status
+
+    def rename(src, dst)
+      @hdfs.rename(Path.new(src), Path.new(dst))
+    end
+    alias_method :mv, :rename
+
     def exists?(path)
       @hdfs.exists(Path.new(path))
     end
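The two additions wrap Hadoop's `FileStatus` lookup and `rename` behind plain string paths, with shell-flavored aliases. A usage sketch, assuming `hdfs` is an already-connected client from this gem and the paths are placeholders:

```ruby
# `hdfs` is assumed to be a connected SparkToolkit HDFS client.
status = hdfs.status('/user/demo/input.txt')  # alias of get_file_status
puts status.get_len                           # Hadoop's FileStatus exposes size, owner, etc.

# Move a file within HDFS; alias of rename.
hdfs.mv('/user/demo/input.txt', '/user/demo/archive/input.txt')
```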
@@ -17,7 +17,8 @@ class Java::OrgApacheHadoopYarnApiRecordsImplPb::ApplicationReportPBImpl
       tracking_url: get_tracking_url,
       start_time: get_start_time,
       finish_time: get_finish_time,
-      state: get_yarn_application_state
+      app_state: get_yarn_application_state,
+      final_status: final_status
     }
   end
 
@@ -26,4 +27,8 @@ class Java::OrgApacheHadoopYarnApiRecordsImplPb::ApplicationReportPBImpl
   def get_yarn_application_state
     getYarnApplicationState.to_s.to_sym
   end
+
+  def final_status
+    final_application_status.to_s.to_sym
+  end
 end
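Renaming `state:` to `app_state:` and adding `final_status:` lets the detail hash distinguish the YARN lifecycle state (e.g. `:FINISHED`) from the application's own outcome (e.g. `:SUCCEEDED` or `:FAILED`), which can disagree. A sketch of reading both, assuming `app_report` was obtained as in YARN.md:

```ruby
# `app_report` is assumed to come from the YARN client, as in YARN.md.
detail = app_report.get_detail

# A job can reach :FINISHED at the cluster level yet still be :FAILED,
# so judging success needs both keys.
if detail[:app_state] == :FINISHED && detail[:final_status] == :SUCCEEDED
  puts "succeeded; tracking URL: #{detail[:tracking_url]}"
end
```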
@@ -1,7 +1,9 @@
 module SparkToolkit
   module YARN
     Client = Java::OrgApacheHadoopYarnClientApiImpl::YarnClientImpl
+
     class Client
+      attr_reader :conf
       alias_method :initalise, :initialize
       def initialize(conf=nil)
         initalise
@@ -28,6 +30,22 @@ module SparkToolkit
       getNodeReports.to_a
     end
 
+    def get_cluster_report
+      sum = get_node_reports.reduce([0,0,0,0]) do |sum, report|
+        sum[0] += report.get_total_memory
+        sum[1] += report.get_used_memory
+        sum[2] += report.get_total_vcores
+        sum[3] += report.get_used_vcores
+        sum
+      end
+      {
+        total_memory: sum[0],
+        used_memory: sum[1],
+        total_vcores: sum[2],
+        used_vcores: sum[3],
+      }
+    end
+
     # Available devs are:
     # - :all
     # - :stdout
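Because `get_cluster_report` only sums the per-node figures, callers can derive utilization ratios directly from the returned hash. An illustrative sketch, assuming a connected `yarn` client:

```ruby
# `yarn` is assumed to be a connected SparkToolkit::YARN::Client.
report = yarn.get_cluster_report

mem_ratio   = report[:used_memory].to_f / report[:total_memory]
vcore_ratio = report[:used_vcores].to_f / report[:total_vcores]

puts format('memory %.1f%% used, vcores %.1f%% used', mem_ratio * 100, vcore_ratio * 100)
```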
@@ -1,4 +1,4 @@
-class Java::OrgApacheHadoopYarnApiRecordsImplPb::ResourcePBImpl
+class Java::OrgApacheHadoopYarnApiRecordsImplPb::NodeReportPBImpl
   # get_node_id
 
   # get_num_containers
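With the corrected class name, the commented getters now patch the class that `get_node_reports` actually returns. A small sketch of walking those reports, assuming a connected `yarn` client:

```ruby
# `yarn` is assumed to be a connected SparkToolkit::YARN::Client.
yarn.get_node_reports.each do |node|
  # get_node_id and get_num_containers are the getters noted above.
  puts "#{node.get_node_id}: #{node.get_num_containers} containers"
end
```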
@@ -1,3 +1,3 @@
 module SparkToolkit
-  VERSION = "0.1.1"
+  VERSION = "0.1.2"
 end
@@ -22,6 +22,8 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]
 
+  spec.add_runtime_dependency 'hbase-jruby'
+
   spec.add_development_dependency "bundler", "~> 1.13"
   spec.add_development_dependency "pry"
   spec.add_development_dependency "rake", "~> 10.0"
metadata CHANGED
@@ -1,15 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: spark_toolkit
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: java
 authors:
 - Yuli Mo
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2017-02-08 00:00:00.000000000 Z
+date: 2017-02-20 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: hbase-jruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement