knife-hadoop 0.0.8 → 0.1.0
- checksums.yaml +7 -0
- data/Gemfile.lock +5 -5
- data/README.md +15 -3
- data/knife-hadoop.gemspec +1 -1
- data/lib/chef/knife/hadoop_base.rb +10 -4
- data/lib/chef/knife/hadoop_hdfs_usage.rb +134 -0
- data/lib/chef/knife/hadoop_mapred_job_list.rb +1 -0
- data/lib/knife-hadoop/version.rb +1 -1
- metadata +24 -39
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 9fa81ed05953792988f58df504fae11f92736765
+  data.tar.gz: 84f278679b4b3c4fbc4b6251a4223f3c9bbb4e25
+SHA512:
+  metadata.gz: 80aefed486cf8dd1d28cb41d75d3cefa4ccf80fbecb5aa4f7f877b14f005b41e9b4b692596266ffb3b64c660de6a4d147e4567fa25fa6e30d75e2cd2065590d3
+  data.tar.gz: 876517ff2a7093e9b22ec39739957d1a414e4cc7a9d0304c05b2a2d24e6e26907c3202fcfc034a8c6a0989bf25b56fe008a02fdf97c90ac37fc8fa82b6376773
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
 PATH
   remote: .
   specs:
-    knife-hadoop (0.0.
-      chef
+    knife-hadoop (0.0.8)
+      chef (> 10.24.0)
       debugger
       pg
       rest-client
@@ -28,7 +28,7 @@ GEM
       rest-client (>= 1.0.4, < 1.7.0)
       yajl-ruby (~> 1.1)
     columnize (0.3.6)
-    debugger (1.
+    debugger (1.4.0)
       columnize (>= 0.3.1)
       debugger-linecache (~> 1.1.1)
       debugger-ruby_core_source (~> 1.2.0)
@@ -46,7 +46,7 @@ GEM
     mixlib-config (1.1.2)
     mixlib-log (1.4.1)
     mixlib-shellout (1.1.0)
-    net-ssh (2.6.
+    net-ssh (2.6.6)
     net-ssh-gateway (1.2.0)
       net-ssh (>= 2.6.5)
     net-ssh-multi (1.1)
@@ -63,7 +63,7 @@ GEM
     pg (0.14.1)
     rest-client (1.6.7)
       mime-types (>= 1.16)
-    sequel (3.
+    sequel (3.45.0)
     sqlite3 (1.3.7)
     systemu (2.5.2)
     webhdfs (0.5.1)
data/README.md CHANGED
@@ -3,6 +3,15 @@ Knife Hadoop
 
 This is a Chef Knife plugin for Hadoop. This plugin gives knife the ability to provision, list, and manage Hadoop for Operators.
 
+Version 0.1.0
+Chef 11.x
+
+Version 0.0.9
+
+Added PostgreSQL port option
+General clean up
+
+
 Version 0.0.8
 
 Bug Fixes.
@@ -13,7 +22,7 @@ HDFS APIs (currently supported) using the ruby webhdfs gem: https://github.com/k
 https://github.com/murraju/webhdfs
 
 a. List Directories and Files
-b. Snapshot metadata information to a database (
+b. Snapshot metadata information to a database (PostgreSQL or Sqlite). Useful for reporting and audits
 c. Create Directories and Files
 d. Update Files
 e. Read Files
@@ -26,8 +35,9 @@ MapReduce APIs supported using the awesome work done by huahin: https://github.c
 
 Issues:
 
-1. The WebHDFS gem has bugs on net-http for create/delete
-2. Not all methods are exposed
+1. The WebHDFS gem has bugs on net-http for create/delete.
+2. Not all methods are exposed.
+3. HDFS usage still in development.
 
 
 
@@ -57,6 +67,7 @@ In order to communicate with Hadoop and other APIs, you will have to set paramet
     knife[:db_username] = "dbusername"
     knife[:db_password] = "dbpassword"
     knife[:db_host] = "dbhost"
+    knife[:db_host] = "port"
     knife[:db] = "db"
 
 If your knife.rb file will be checked into a SCM system (ie readable by others) you may want to read the values from environment variables:
@@ -70,6 +81,7 @@ If your knife.rb file will be checked into a SCM system (ie readable by others)
     knife[:db_username] = "#{ENV['DB_USERNAME']}"
     knife[:db_password] = "#{ENV['DB_PASSWORD']}"
     knife[:db_host] = "#{ENV['DB_HOST']}"
+    knife[:db_host] = "#{ENV['DB_PORT']}"
     knife[:db] = "#{ENV['DB']}"
 
 
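For reference, a minimal knife.rb sketch of the database settings this release works with. Every value is a placeholder; the `knife[:db_port]` key is an assumption inferred from the `Chef::Config[:knife][:db_port]` lookup added to hadoop_base.rb below (the README snippet above assigns the port to `knife[:db_host]`, which appears to be a typo in the upstream README):

    # knife.rb -- hypothetical values; :db_port mirrors the option added in hadoop_base.rb
    knife[:db_type]     = "postgres"
    knife[:db_username] = "dbusername"
    knife[:db_password] = "dbpassword"
    knife[:db_host]     = "dbhost"
    knife[:db_port]     = "5432"
    knife[:db]          = "db"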
data/knife-hadoop.gemspec CHANGED
data/lib/chef/knife/hadoop_base.rb CHANGED
@@ -87,6 +87,12 @@ class Chef
             :long => "--db-host DBHOST",
             :description => "PostgreSQL DB Host",
             :proc => Proc.new { |key| Chef::Config[:knife][:db_host] = key }
+
+          option :db_port,
+            :short => "-F DBPORT",
+            :long => "--db-port DBPORT",
+            :description => "PostgreSQL DB Port",
+            :proc => Proc.new { |key| Chef::Config[:knife][:db_port] = key }
 
         end
       end
@@ -109,14 +115,14 @@ class Chef
         Chef::Log.debug("db_username: #{Chef::Config[:knife][:db_username]}")
         Chef::Log.debug("db_password: #{Chef::Config[:knife][:db_password]}")
         Chef::Log.debug("db_host: #{Chef::Config[:knife][:db_host]}")
+        Chef::Log.debug("db_port: #{Chef::Config[:knife][:db_port]}")
         db_type = "#{Chef::Config[:knife][:db_type]}".downcase
         case db_type
         when 'postgres'
           @db_connection ||= begin
-            db_connection = Sequel.
-
-
-            "#{Chef::Config[:knife][:db]}")
+            db_connection = Sequel.postgres("#{Chef::Config[:knife][:db]}", :user=>"#{Chef::Config[:knife][:db_username]}",
+            :password => "#{Chef::Config[:knife][:db_password]}", :host => "#{Chef::Config[:knife][:db_host]}",
+            :port => "#{Chef::Config[:knife][:db_port]}", :max_connections => 10)
           end
         when 'sqlite'
          @db_connection ||= begin
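The rewritten connection block above is the substantive change in hadoop_base.rb: the new DB port is passed through to Sequel. Below is a minimal standalone sketch of the same `Sequel.postgres` call shape, with placeholder connection values (the plugin interpolates the port as a string; an integer works as well):

    # Minimal sketch of the Sequel.postgres call used above -- all connection values are placeholders.
    require 'sequel'

    db = Sequel.postgres('hadoop_meta',
                         :user            => 'dbusername',
                         :password        => 'dbpassword',
                         :host            => 'dbhost',
                         :port            => 5432,
                         :max_connections => 10)

    puts db.test_connection  # raises if the server is unreachable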
data/lib/chef/knife/hadoop_hdfs_usage.rb ADDED
@@ -0,0 +1,134 @@
+# Author:: Murali Raju (<murali.raju@appliv.com>)
+# Copyright:: Copyright (c) 2012 Murali Raju.
+# License:: Apache License, Version 2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+require 'chef/knife/hadoop_base'
+
+class Chef
+  class Knife
+    class HadoopHdfsUsage < Knife
+
+      include Knife::HadoopBase
+
+      deps do
+        require 'readline'
+        require 'chef/json_compat'
+        require 'chef/knife/bootstrap'
+        require 'net/ssh'
+        Chef::Knife::Bootstrap.load_deps
+      end
+
+      banner "knife hadoop hdfs usage (options)"
+
+      option :type,
+        :short => "-T TYPE",
+        :long => "--type TYPE",
+        :description => "The type <summary,detail,report>",
+        :proc => Proc.new { |f| Chef::Config[:knife][:type] = f }
+
+      option :dir,
+        :short => "-D DIRECTORY",
+        :long => "--hdfs-directory DIRECTORY",
+        :description => "The HDFS Directory to use",
+        :proc => Proc.new { |f| Chef::Config[:knife][:dir] = f }
+
+      option :ssh_user,
+        :short => "-U SSHUSER",
+        :long => "--ssh-user SSHUSER",
+        :description => "The SSH User",
+        :proc => Proc.new { |f| Chef::Config[:knife][:ssh_user] = f }
+
+      option :ssh_password,
+        :short => "-P SSHPASSWORD",
+        :long => "--ssh-password SSHPASSWORD",
+        :description => "The SSH User Password",
+        :proc => Proc.new { |f| Chef::Config[:knife][:ssh_password] = f }
+
+      def run
+        $stdout.sync = true
+
+        hdfs_usage_summary_list = [
+          ui.color('Configured Capacity', :bold),
+          ui.color('Present Capacity', :bold),
+          ui.color('DFS Remaining', :bold),
+          ui.color('DFS Used', :bold),
+          ui.color('DFS Used%', :bold),
+          ui.color('Under replicated blocks', :bold),
+          ui.color('Blocks with corrupt replicas', :bold),
+          ui.color('Missing blocks', :bold),
+          ui.color('Datanodes available', :bold)
+        ]
+
+        hdfs_usage_node_list = [
+          ui.color('Data Node', :bold),
+          ui.color('Decommission Status', :bold),
+          ui.color('Configured Capacity', :bold),
+          ui.color('DFS Used', :bold),
+          ui.color('Non DFS Used', :bold),
+          ui.color('DFS Remaining', :bold),
+          ui.color('DFS Used%', :bold),
+          ui.color('DFS Remaining%', :bold),
+          ui.color('Last contact', :bold)
+        ]
+
+        type = "#{Chef::Config[:knife][:type]}".downcase
+        case type
+        when 'summary'
+          Net::SSH.start( "#{Chef::Config[:knife][:namenode_host]}",
+            "#{Chef::Config[:knife][:ssh_user]}", :password => "#{Chef::Config[:knife][:ssh_password]}" ) do|ssh|
+            result = ssh.exec!('hadoop dfsadmin -report')
+            hdfs_usage_summary_list << result.match(/Configured Capacity: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/Present Capacity: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/DFS Remaining: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/DFS Used: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/DFS Used%: \d+(.*?).*/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/Under replicated blocks: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/Blocks with corrupt replicas: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/Missing blocks: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_summary_list << result.match(/Datanodes available: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+          end
+          puts ui.list(hdfs_usage_summary_list, :uneven_columns_across, 9)
+        when 'detail'
+          Net::SSH.start( "#{Chef::Config[:knife][:namenode_host]}",
+            "#{Chef::Config[:knife][:ssh_user]}", :password => "#{Chef::Config[:knife][:ssh_password]}" ) do|ssh|
+            result = ssh.exec!('hadoop dfsadmin -report')
+            hdfs_usage_node_list << result.match(/Name: \d+(.*?).*/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/Decommission Status : \w+(.*?).*/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/Configured Capacity: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/DFS Used: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/Non DFS Used: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/DFS Remaining: \d+(.*?)/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/DFS Used%: \d+(.*?).*/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/DFS Remaining%: \d+(.*?).*/).to_s.split(':')[1].gsub(/\s+/, "")
+            hdfs_usage_node_list << result.match(/Last contact: \w+(.*?) .*/).to_s.split(':')[1]
+          end
+          puts ui.list(hdfs_usage_node_list, :uneven_columns_across, 9)
+        when 'report'
+          Net::SSH.start( "#{Chef::Config[:knife][:namenode_host]}",
+            "#{Chef::Config[:knife][:ssh_user]}", :password => "#{Chef::Config[:knife][:ssh_password]}" ) do|ssh|
+            result = ssh.exec!('hadoop dfsadmin -report')
+            file = "hdfs_usage_report_created_on_#{Time.now}.txt"
+            File.open("/tmp/#{file}", 'w') do |f|
+              f.write(result)
+              f.close
+            end
+            hdfs_connection.create("#{Chef::Config[:knife][:dir]}/#{file}", result)
+          end
+        end
+      end
+    end
+  end
+end
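The new `knife hadoop hdfs usage` subcommand SSHes into the namenode (taken from `knife[:namenode_host]`), runs `hadoop dfsadmin -report`, and either prints a summary or per-datanode table, or, with `--type report`, writes the raw report to /tmp and copies it into the HDFS directory given by `--hdfs-directory`. As a rough illustration of how the summary regexes behave, here is a small Ruby sketch; the sample report lines are illustrative placeholders, not output from a real cluster:

    # Illustrative parse of one `hadoop dfsadmin -report` line, mirroring the regexes above.
    result = "Configured Capacity: 53687091200 (50 GB)\nPresent Capacity: 48318382080 (45 GB)"

    matched = result.match(/Configured Capacity: \d+(.*?)/).to_s  # => "Configured Capacity: 53687091200"
    puts matched.split(':')[1].gsub(/\s+/, "")                    # => "53687091200"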
data/lib/knife-hadoop/version.rb CHANGED
metadata CHANGED
@@ -1,128 +1,113 @@
 --- !ruby/object:Gem::Specification
 name: knife-hadoop
 version: !ruby/object:Gem::Version
-  version: 0.0
-  prerelease:
+  version: 0.1.0
 platform: ruby
 authors:
 - Murali Raju
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-
+date: 2013-03-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: webhdfs
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: pg
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: sqlite3
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: sequel
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: debugger
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: rest-client
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: chef
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>'
     - !ruby/object:Gem::Version
-      version:
+      version: 11.0.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - '>'
     - !ruby/object:Gem::Version
-      version:
+      version: 11.0.0
 description: Hadoop Chef Knife Plugin
 email:
 - murraju@appliv.com
@@ -144,32 +129,32 @@ files:
 - lib/chef/knife/hadoop_hdfs_list.rb
 - lib/chef/knife/hadoop_hdfs_snapshot.rb
 - lib/chef/knife/hadoop_hdfs_update.rb
+- lib/chef/knife/hadoop_hdfs_usage.rb
 - lib/chef/knife/hadoop_mapred_job_kill.rb
 - lib/chef/knife/hadoop_mapred_job_list.rb
 - lib/chef/knife/hadoop_setup.rb
 - lib/knife-hadoop/version.rb
 homepage: https://github.com/murraju/knife-hadoop
 licenses: []
+metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version:
+rubygems_version: 2.0.3
 signing_key:
-specification_version:
+specification_version: 4
 summary: Hadoop Chef Knife Plugin
 test_files: []