knife-hadoop 0.0.4
Sign up to get free protection for your applications and to get access to all the features.
- data/Gemfile +4 -0
- data/Gemfile.lock +75 -0
- data/LICENSE +201 -0
- data/README.md +90 -0
- data/Rakefile +2 -0
- data/configs/hosfw.json +1 -0
- data/knife-hadoop.gemspec +28 -0
- data/lib/chef/knife/hadoop_base.rb +129 -0
- data/lib/chef/knife/hadoop_hdfs_create.rb +97 -0
- data/lib/chef/knife/hadoop_hdfs_list.rb +93 -0
- data/lib/chef/knife/hadoop_hdfs_snapshot.rb +99 -0
- data/lib/chef/knife/hadoop_hdfs_update.rb +57 -0
- data/lib/chef/knife/hadoop_mapred_job_kill.rb +69 -0
- data/lib/chef/knife/hadoop_mapred_job_list.rb +102 -0
- data/lib/chef/knife/hadoop_setup.rb +73 -0
- data/lib/knife-hadoop/version.rb +6 -0
- metadata +159 -0
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,75 @@
|
|
1
|
+
PATH
|
2
|
+
remote: .
|
3
|
+
specs:
|
4
|
+
knife-hadoop (0.0.1)
|
5
|
+
chef
|
6
|
+
debugger
|
7
|
+
pg
|
8
|
+
rest-client
|
9
|
+
sequel
|
10
|
+
webhdfs
|
11
|
+
|
12
|
+
GEM
|
13
|
+
remote: http://rubygems.org/
|
14
|
+
specs:
|
15
|
+
chef (11.4.0)
|
16
|
+
erubis
|
17
|
+
highline (>= 1.6.9)
|
18
|
+
json (>= 1.4.4, <= 1.7.7)
|
19
|
+
mixlib-authentication (>= 1.3.0)
|
20
|
+
mixlib-cli (~> 1.3.0)
|
21
|
+
mixlib-config (>= 1.1.2)
|
22
|
+
mixlib-log (>= 1.3.0)
|
23
|
+
mixlib-shellout
|
24
|
+
net-ssh (~> 2.6)
|
25
|
+
net-ssh-multi (~> 1.1.0)
|
26
|
+
ohai (>= 0.6.0)
|
27
|
+
rest-client (>= 1.0.4, < 1.7.0)
|
28
|
+
yajl-ruby (~> 1.1)
|
29
|
+
columnize (0.3.6)
|
30
|
+
debugger (1.3.1)
|
31
|
+
columnize (>= 0.3.1)
|
32
|
+
debugger-linecache (~> 1.1.1)
|
33
|
+
debugger-ruby_core_source (~> 1.1.8)
|
34
|
+
debugger-linecache (1.1.2)
|
35
|
+
debugger-ruby_core_source (>= 1.1.1)
|
36
|
+
debugger-ruby_core_source (1.1.8)
|
37
|
+
erubis (2.7.0)
|
38
|
+
highline (1.6.15)
|
39
|
+
ipaddress (0.8.0)
|
40
|
+
json (1.7.7)
|
41
|
+
mime-types (1.21)
|
42
|
+
mixlib-authentication (1.3.0)
|
43
|
+
mixlib-log
|
44
|
+
mixlib-cli (1.3.0)
|
45
|
+
mixlib-config (1.1.2)
|
46
|
+
mixlib-log (1.4.1)
|
47
|
+
mixlib-shellout (1.1.0)
|
48
|
+
net-ssh (2.6.5)
|
49
|
+
net-ssh-gateway (1.2.0)
|
50
|
+
net-ssh (>= 2.6.5)
|
51
|
+
net-ssh-multi (1.1)
|
52
|
+
net-ssh (>= 2.1.4)
|
53
|
+
net-ssh-gateway (>= 0.99.0)
|
54
|
+
ohai (6.16.0)
|
55
|
+
ipaddress
|
56
|
+
mixlib-cli
|
57
|
+
mixlib-config
|
58
|
+
mixlib-log
|
59
|
+
mixlib-shellout
|
60
|
+
systemu
|
61
|
+
yajl-ruby
|
62
|
+
pg (0.14.1)
|
63
|
+
rest-client (1.6.7)
|
64
|
+
mime-types (>= 1.16)
|
65
|
+
sequel (3.44.0)
|
66
|
+
systemu (2.5.2)
|
67
|
+
webhdfs (0.5.1)
|
68
|
+
yajl-ruby (1.1.0)
|
69
|
+
|
70
|
+
PLATFORMS
|
71
|
+
java
|
72
|
+
ruby
|
73
|
+
|
74
|
+
DEPENDENCIES
|
75
|
+
knife-hadoop!
|
data/LICENSE
ADDED
@@ -0,0 +1,201 @@
|
|
1
|
+
Apache License
|
2
|
+
Version 2.0, January 2004
|
3
|
+
http://www.apache.org/licenses/
|
4
|
+
|
5
|
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
6
|
+
|
7
|
+
1. Definitions.
|
8
|
+
|
9
|
+
"License" shall mean the terms and conditions for use, reproduction,
|
10
|
+
and distribution as defined by Sections 1 through 9 of this document.
|
11
|
+
|
12
|
+
"Licensor" shall mean the copyright owner or entity authorized by
|
13
|
+
the copyright owner that is granting the License.
|
14
|
+
|
15
|
+
"Legal Entity" shall mean the union of the acting entity and all
|
16
|
+
other entities that control, are controlled by, or are under common
|
17
|
+
control with that entity. For the purposes of this definition,
|
18
|
+
"control" means (i) the power, direct or indirect, to cause the
|
19
|
+
direction or management of such entity, whether by contract or
|
20
|
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
21
|
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
22
|
+
|
23
|
+
"You" (or "Your") shall mean an individual or Legal Entity
|
24
|
+
exercising permissions granted by this License.
|
25
|
+
|
26
|
+
"Source" form shall mean the preferred form for making modifications,
|
27
|
+
including but not limited to software source code, documentation
|
28
|
+
source, and configuration files.
|
29
|
+
|
30
|
+
"Object" form shall mean any form resulting from mechanical
|
31
|
+
transformation or translation of a Source form, including but
|
32
|
+
not limited to compiled object code, generated documentation,
|
33
|
+
and conversions to other media types.
|
34
|
+
|
35
|
+
"Work" shall mean the work of authorship, whether in Source or
|
36
|
+
Object form, made available under the License, as indicated by a
|
37
|
+
copyright notice that is included in or attached to the work
|
38
|
+
(an example is provided in the Appendix below).
|
39
|
+
|
40
|
+
"Derivative Works" shall mean any work, whether in Source or Object
|
41
|
+
form, that is based on (or derived from) the Work and for which the
|
42
|
+
editorial revisions, annotations, elaborations, or other modifications
|
43
|
+
represent, as a whole, an original work of authorship. For the purposes
|
44
|
+
of this License, Derivative Works shall not include works that remain
|
45
|
+
separable from, or merely link (or bind by name) to the interfaces of,
|
46
|
+
the Work and Derivative Works thereof.
|
47
|
+
|
48
|
+
"Contribution" shall mean any work of authorship, including
|
49
|
+
the original version of the Work and any modifications or additions
|
50
|
+
to that Work or Derivative Works thereof, that is intentionally
|
51
|
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
52
|
+
or by an individual or Legal Entity authorized to submit on behalf of
|
53
|
+
the copyright owner. For the purposes of this definition, "submitted"
|
54
|
+
means any form of electronic, verbal, or written communication sent
|
55
|
+
to the Licensor or its representatives, including but not limited to
|
56
|
+
communication on electronic mailing lists, source code control systems,
|
57
|
+
and issue tracking systems that are managed by, or on behalf of, the
|
58
|
+
Licensor for the purpose of discussing and improving the Work, but
|
59
|
+
excluding communication that is conspicuously marked or otherwise
|
60
|
+
designated in writing by the copyright owner as "Not a Contribution."
|
61
|
+
|
62
|
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
63
|
+
on behalf of whom a Contribution has been received by Licensor and
|
64
|
+
subsequently incorporated within the Work.
|
65
|
+
|
66
|
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
67
|
+
this License, each Contributor hereby grants to You a perpetual,
|
68
|
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
69
|
+
copyright license to reproduce, prepare Derivative Works of,
|
70
|
+
publicly display, publicly perform, sublicense, and distribute the
|
71
|
+
Work and such Derivative Works in Source or Object form.
|
72
|
+
|
73
|
+
3. Grant of Patent License. Subject to the terms and conditions of
|
74
|
+
this License, each Contributor hereby grants to You a perpetual,
|
75
|
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
76
|
+
(except as stated in this section) patent license to make, have made,
|
77
|
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
78
|
+
where such license applies only to those patent claims licensable
|
79
|
+
by such Contributor that are necessarily infringed by their
|
80
|
+
Contribution(s) alone or by combination of their Contribution(s)
|
81
|
+
with the Work to which such Contribution(s) was submitted. If You
|
82
|
+
institute patent litigation against any entity (including a
|
83
|
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
84
|
+
or a Contribution incorporated within the Work constitutes direct
|
85
|
+
or contributory patent infringement, then any patent licenses
|
86
|
+
granted to You under this License for that Work shall terminate
|
87
|
+
as of the date such litigation is filed.
|
88
|
+
|
89
|
+
4. Redistribution. You may reproduce and distribute copies of the
|
90
|
+
Work or Derivative Works thereof in any medium, with or without
|
91
|
+
modifications, and in Source or Object form, provided that You
|
92
|
+
meet the following conditions:
|
93
|
+
|
94
|
+
(a) You must give any other recipients of the Work or
|
95
|
+
Derivative Works a copy of this License; and
|
96
|
+
|
97
|
+
(b) You must cause any modified files to carry prominent notices
|
98
|
+
stating that You changed the files; and
|
99
|
+
|
100
|
+
(c) You must retain, in the Source form of any Derivative Works
|
101
|
+
that You distribute, all copyright, patent, trademark, and
|
102
|
+
attribution notices from the Source form of the Work,
|
103
|
+
excluding those notices that do not pertain to any part of
|
104
|
+
the Derivative Works; and
|
105
|
+
|
106
|
+
(d) If the Work includes a "NOTICE" text file as part of its
|
107
|
+
distribution, then any Derivative Works that You distribute must
|
108
|
+
include a readable copy of the attribution notices contained
|
109
|
+
within such NOTICE file, excluding those notices that do not
|
110
|
+
pertain to any part of the Derivative Works, in at least one
|
111
|
+
of the following places: within a NOTICE text file distributed
|
112
|
+
as part of the Derivative Works; within the Source form or
|
113
|
+
documentation, if provided along with the Derivative Works; or,
|
114
|
+
within a display generated by the Derivative Works, if and
|
115
|
+
wherever such third-party notices normally appear. The contents
|
116
|
+
of the NOTICE file are for informational purposes only and
|
117
|
+
do not modify the License. You may add Your own attribution
|
118
|
+
notices within Derivative Works that You distribute, alongside
|
119
|
+
or as an addendum to the NOTICE text from the Work, provided
|
120
|
+
that such additional attribution notices cannot be construed
|
121
|
+
as modifying the License.
|
122
|
+
|
123
|
+
You may add Your own copyright statement to Your modifications and
|
124
|
+
may provide additional or different license terms and conditions
|
125
|
+
for use, reproduction, or distribution of Your modifications, or
|
126
|
+
for any such Derivative Works as a whole, provided Your use,
|
127
|
+
reproduction, and distribution of the Work otherwise complies with
|
128
|
+
the conditions stated in this License.
|
129
|
+
|
130
|
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
131
|
+
any Contribution intentionally submitted for inclusion in the Work
|
132
|
+
by You to the Licensor shall be under the terms and conditions of
|
133
|
+
this License, without any additional terms or conditions.
|
134
|
+
Notwithstanding the above, nothing herein shall supersede or modify
|
135
|
+
the terms of any separate license agreement you may have executed
|
136
|
+
with Licensor regarding such Contributions.
|
137
|
+
|
138
|
+
6. Trademarks. This License does not grant permission to use the trade
|
139
|
+
names, trademarks, service marks, or product names of the Licensor,
|
140
|
+
except as required for reasonable and customary use in describing the
|
141
|
+
origin of the Work and reproducing the content of the NOTICE file.
|
142
|
+
|
143
|
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
144
|
+
agreed to in writing, Licensor provides the Work (and each
|
145
|
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
146
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
147
|
+
implied, including, without limitation, any warranties or conditions
|
148
|
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
149
|
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
150
|
+
appropriateness of using or redistributing the Work and assume any
|
151
|
+
risks associated with Your exercise of permissions under this License.
|
152
|
+
|
153
|
+
8. Limitation of Liability. In no event and under no legal theory,
|
154
|
+
whether in tort (including negligence), contract, or otherwise,
|
155
|
+
unless required by applicable law (such as deliberate and grossly
|
156
|
+
negligent acts) or agreed to in writing, shall any Contributor be
|
157
|
+
liable to You for damages, including any direct, indirect, special,
|
158
|
+
incidental, or consequential damages of any character arising as a
|
159
|
+
result of this License or out of the use or inability to use the
|
160
|
+
Work (including but not limited to damages for loss of goodwill,
|
161
|
+
work stoppage, computer failure or malfunction, or any and all
|
162
|
+
other commercial damages or losses), even if such Contributor
|
163
|
+
has been advised of the possibility of such damages.
|
164
|
+
|
165
|
+
9. Accepting Warranty or Additional Liability. While redistributing
|
166
|
+
the Work or Derivative Works thereof, You may choose to offer,
|
167
|
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
168
|
+
or other liability obligations and/or rights consistent with this
|
169
|
+
License. However, in accepting such obligations, You may act only
|
170
|
+
on Your own behalf and on Your sole responsibility, not on behalf
|
171
|
+
of any other Contributor, and only if You agree to indemnify,
|
172
|
+
defend, and hold each Contributor harmless for any liability
|
173
|
+
incurred by, or claims asserted against, such Contributor by reason
|
174
|
+
of your accepting any such warranty or additional liability.
|
175
|
+
|
176
|
+
END OF TERMS AND CONDITIONS
|
177
|
+
|
178
|
+
APPENDIX: How to apply the Apache License to your work.
|
179
|
+
|
180
|
+
To apply the Apache License to your work, attach the following
|
181
|
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
182
|
+
replaced with your own identifying information. (Don't include
|
183
|
+
the brackets!) The text should be enclosed in the appropriate
|
184
|
+
comment syntax for the file format. We also recommend that a
|
185
|
+
file or class name and description of purpose be included on the
|
186
|
+
same "printed page" as the copyright notice for easier
|
187
|
+
identification within third-party archives.
|
188
|
+
|
189
|
+
Copyright [yyyy] [name of copyright owner]
|
190
|
+
|
191
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
192
|
+
you may not use this file except in compliance with the License.
|
193
|
+
You may obtain a copy of the License at
|
194
|
+
|
195
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
196
|
+
|
197
|
+
Unless required by applicable law or agreed to in writing, software
|
198
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
199
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
200
|
+
See the License for the specific language governing permissions and
|
201
|
+
limitations under the License.
|
data/README.md
ADDED
@@ -0,0 +1,90 @@
|
|
1
|
+
Knife Hadoop
|
2
|
+
===============
|
3
|
+
|
4
|
+
This is a Chef Knife plugin for Hadoop. This plugin gives knife the ability to provision, list, and manage Hadoop for Operators.
|
5
|
+
|
6
|
+
Version 0.0.3
|
7
|
+
|
8
|
+
Features:
|
9
|
+
|
10
|
+
HDFS APIs (currently supported) using the ruby webhdfs gem: https://github.com/kzk/webhdfs. Extensions to webhdfs will be hosted at
|
11
|
+
https://github.com/murraju/webhdfs
|
12
|
+
|
13
|
+
a. List Directories and Files
|
14
|
+
b. Snapshot metadata information to a database (PostgreSQL for now). Useful for reporting and audits
|
15
|
+
c. Create Directories and Files
|
16
|
+
d. Update Files
|
17
|
+
e. Read Files
|
18
|
+
|
19
|
+
MapReduce APIs supported using the awesome work done by huahin: https://github.com/huahin
|
20
|
+
|
21
|
+
a. Start/List/Kill MapReduce Jobs by JobID and JobName
|
22
|
+
|
23
|
+
|
24
|
+
|
25
|
+
Issues:
|
26
|
+
|
27
|
+
1. The WebHDFS gem has bugs on net-http for create/delete
|
28
|
+
2. Not all methods are exposed
|
29
|
+
|
30
|
+
|
31
|
+
|
32
|
+
# Installation #
|
33
|
+
|
34
|
+
Be sure you are running the latest version Chef. Versions earlier than 0.10.0 don't support plugins:
|
35
|
+
|
36
|
+
$ gem install chef
|
37
|
+
|
38
|
+
This plugin is distributed as a Ruby Gem. To install it, run:
|
39
|
+
|
40
|
+
$ gem install knife-hadoop
|
41
|
+
|
42
|
+
Depending on your system's configuration, you may need to run this command with root privileges.
|
43
|
+
|
44
|
+
# Configuration #
|
45
|
+
|
46
|
+
In order to communicate with Hadoop and other APIs, you will have to set parameters. The easiest way to accomplish this is to create some entries in your `knife.rb` file:
|
47
|
+
|
48
|
+
knife[:namenode_host] = "namenode"
|
49
|
+
knife[:namenode_port] = "port"
|
50
|
+
knife[:namenode_username] = "namenode_username"
|
51
|
+
knife[:mapred_mgmt_host] = "mapred_mgmt_host"
|
52
|
+
knife[:mapred_mgmt_port] = "mapred_mgmt_port"
|
53
|
+
knife[:db_username] = "dbusername"
|
54
|
+
knife[:db_password] = "dbpassword"
|
55
|
+
knife[:db_host] = "dbhost"
|
56
|
+
knife[:db] = "db"
|
57
|
+
|
58
|
+
If your knife.rb file will be checked into a SCM system (ie readable by others) you may want to read the values from environment variables:
|
59
|
+
|
60
|
+
knife[:namenode_host] = "#{ENV['NAMENODE_HOST']}"
|
61
|
+
knife[:namenode_port] = "#{ENV['NAMENODE_PORT']}"
|
62
|
+
knife[:namenode_username] = "#{ENV['NAMENODE_USERNAME']}"
|
63
|
+
knife[:mapred_mgmt_host] = "#{ENV['MAPRED_MGMT_HOST']}"
|
64
|
+
knife[:mapred_mgmt_port] = "#{ENV['MAPRED_MGMT_PORT']}"
|
65
|
+
knife[:db_username] = "#{ENV['DB_USERNAME']}"
|
66
|
+
knife[:db_password] = "#{ENV['DB_PASSWORD']}"
|
67
|
+
knife[:db_host] = "#{ENV['DB_HOST']}"
|
68
|
+
knife[:db] = "#{ENV['DB']}"
|
69
|
+
|
70
|
+
|
71
|
+
|
72
|
+
# License #
|
73
|
+
|
74
|
+
Author:: Murali Raju <murali.raju@appliv.com>
|
75
|
+
|
76
|
+
Copyright:: Copyright (c) 2012 Murali Raju <murali.raju@appliv.com>
|
77
|
+
|
78
|
+
License:: Apache License, Version 2.0
|
79
|
+
|
80
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
81
|
+
you may not use this file except in compliance with the License.
|
82
|
+
You may obtain a copy of the License at
|
83
|
+
|
84
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
85
|
+
|
86
|
+
Unless required by applicable law or agreed to in writing, software
|
87
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
88
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
89
|
+
See the License for the specific language governing permissions and
|
90
|
+
limitations under the License.
|
data/Rakefile
ADDED
data/configs/hosfw.json
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
|
@@ -0,0 +1,28 @@
|
|
1
|
+
# -*- encoding: utf-8 -*-
|
2
|
+
$:.push File.expand_path("../lib", __FILE__)
|
3
|
+
require "knife-hadoop/version"
|
4
|
+
|
5
|
+
Gem::Specification.new do |s|
|
6
|
+
s.name = "knife-hadoop"
|
7
|
+
s.version = Knife::Hadoop::VERSION
|
8
|
+
s.platform = Gem::Platform::RUBY
|
9
|
+
s.has_rdoc = true
|
10
|
+
s.extra_rdoc_files = ["README.md", "LICENSE" ]
|
11
|
+
s.authors = ["Murali Raju"]
|
12
|
+
s.email = ["murraju@appliv.com"]
|
13
|
+
s.homepage = "https://github.com/murraju/knife-hadoop"
|
14
|
+
s.summary = %q{Hadoop Chef Knife Plugin}
|
15
|
+
s.description = s.summary
|
16
|
+
|
17
|
+
s.files = `git ls-files`.split("\n")
|
18
|
+
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
|
19
|
+
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
|
20
|
+
s.require_paths = ["lib"]
|
21
|
+
|
22
|
+
s.add_dependency "webhdfs"
|
23
|
+
s.add_dependency "pg"
|
24
|
+
s.add_dependency "sequel"
|
25
|
+
s.add_dependency "debugger"
|
26
|
+
s.add_dependency "rest-client"
|
27
|
+
s.add_dependency "chef"
|
28
|
+
end
|
@@ -0,0 +1,129 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
|
18
|
+
require 'chef/knife'
|
19
|
+
require 'webhdfs'
|
20
|
+
require 'debugger'
|
21
|
+
require 'sequel'
|
22
|
+
require 'rest-client'
|
23
|
+
require 'json'
|
24
|
+
|
25
|
+
class Chef
|
26
|
+
class Knife
|
27
|
+
module HadoopBase
|
28
|
+
|
29
|
+
def self.included(includer)
|
30
|
+
includer.class_eval do
|
31
|
+
|
32
|
+
deps do
|
33
|
+
require 'readline'
|
34
|
+
require 'chef/json_compat'
|
35
|
+
end
|
36
|
+
|
37
|
+
option :namenode_username,
|
38
|
+
:short => "-U USERNAME",
|
39
|
+
:long => "--namenode-username USERNAME",
|
40
|
+
:description => "NameNode Username",
|
41
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:namenode_username] = key }
|
42
|
+
|
43
|
+
option :namenode_password,
|
44
|
+
:short => "-P PASSWORD",
|
45
|
+
:long => "--namenode-password PASSWORD",
|
46
|
+
:description => "NameNode password",
|
47
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:namenode_password] = key }
|
48
|
+
|
49
|
+
option :namenode_host,
|
50
|
+
:short => "-H HOST",
|
51
|
+
:long => "--namenode-host HOST",
|
52
|
+
:description => "NameNode FQDN or IP address",
|
53
|
+
:proc => Proc.new { |endpoint| Chef::Config[:knife][:namenode_host] = endpoint }
|
54
|
+
|
55
|
+
option :namenode_port,
|
56
|
+
:short => "-A PORT",
|
57
|
+
:long => "--namenode-port PORT",
|
58
|
+
:description => "NameNode port",
|
59
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:namenode_port] = key }
|
60
|
+
|
61
|
+
option :db,
|
62
|
+
:short => "-D DATABASE",
|
63
|
+
:long => "--database DATABASE",
|
64
|
+
:description => "PostgreSQL Database to use for Hadoop Management Data",
|
65
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:db] = key }
|
66
|
+
|
67
|
+
option :db_username,
|
68
|
+
:short => "-B DBUSERNAME",
|
69
|
+
:long => "--db-username DBUSERNAME",
|
70
|
+
:description => "PostgreSQL DB Username",
|
71
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:db_username] = key }
|
72
|
+
|
73
|
+
option :db_password,
|
74
|
+
:short => "-C DBPASSWORD",
|
75
|
+
:long => "--db-password DBPASSWORD",
|
76
|
+
:description => "PostgreSQL DB Password",
|
77
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:db_password] = key }
|
78
|
+
|
79
|
+
option :db_host,
|
80
|
+
:short => "-I DBHOST",
|
81
|
+
:long => "--db-host DBHOST",
|
82
|
+
:description => "PostgreSQL DB Host",
|
83
|
+
:proc => Proc.new { |key| Chef::Config[:knife][:db_host] = key }
|
84
|
+
|
85
|
+
end
|
86
|
+
end
|
87
|
+
|
88
|
+
def hdfs_connection
|
89
|
+
Chef::Log.debug("username: #{Chef::Config[:knife][:namenode_username]}")
|
90
|
+
Chef::Log.debug("password: #{Chef::Config[:knife][:namenode_password]}")
|
91
|
+
Chef::Log.debug("host: #{Chef::Config[:knife][:namenode_host]}")
|
92
|
+
Chef::Log.debug("port: #{Chef::Config[:knife][:namenode_port]}")
|
93
|
+
@hdfs_connection ||= begin
|
94
|
+
hdfs_connection = WebHDFS::Client.new("#{Chef::Config[:knife][:namenode_host]}",
|
95
|
+
"#{Chef::Config[:knife][:namenode_port]}",
|
96
|
+
"#{Chef::Config[:knife][:namenode_username]}")
|
97
|
+
end
|
98
|
+
end
|
99
|
+
|
100
|
+
def db_connection
|
101
|
+
Chef::Log.debug("db: #{Chef::Config[:knife][:db]}")
|
102
|
+
Chef::Log.debug("db_username: #{Chef::Config[:knife][:db_username]}")
|
103
|
+
Chef::Log.debug("db_password: #{Chef::Config[:knife][:db_password]}")
|
104
|
+
Chef::Log.debug("db_host: #{Chef::Config[:knife][:db_host]}")
|
105
|
+
@db_connection ||= begin
|
106
|
+
db_connection = Sequel.connect("postgres://#{Chef::Config[:knife][:db_username]}"+':'+
|
107
|
+
"#{Chef::Config[:knife][:db_password]}"+'@'+
|
108
|
+
"#{Chef::Config[:knife][:db_host]}"+'/'+
|
109
|
+
"#{Chef::Config[:knife][:db]}")
|
110
|
+
end
|
111
|
+
end
|
112
|
+
|
113
|
+
|
114
|
+
def locate_config_value(key)
|
115
|
+
key = key.to_sym
|
116
|
+
Chef::Config[:knife][key] || config[key]
|
117
|
+
end
|
118
|
+
|
119
|
+
def msg_pair(label, value, color=:cyan)
|
120
|
+
if value && !value.to_s.empty?
|
121
|
+
puts "#{ui.color(label, color)}: #{value}"
|
122
|
+
end
|
123
|
+
end
|
124
|
+
|
125
|
+
end
|
126
|
+
end
|
127
|
+
end
|
128
|
+
|
129
|
+
|
@@ -0,0 +1,97 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
|
20
|
+
class Knife
|
21
|
+
class HadoopHdfsCreate < Knife
|
22
|
+
|
23
|
+
include Knife::HadoopBase
|
24
|
+
|
25
|
+
deps do
|
26
|
+
require 'readline'
|
27
|
+
require 'chef/json_compat'
|
28
|
+
require 'chef/knife/bootstrap'
|
29
|
+
Chef::Knife::Bootstrap.load_deps
|
30
|
+
end
|
31
|
+
|
32
|
+
banner "knife hadoop hdfs create (options)"
|
33
|
+
|
34
|
+
option :type,
|
35
|
+
:short => "-T TYPE",
|
36
|
+
:long => "--type TYPE",
|
37
|
+
:description => "The type <dir,file>",
|
38
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:type] = f }
|
39
|
+
|
40
|
+
option :data,
|
41
|
+
:short => "-D DATA",
|
42
|
+
:long => "--data DATA",
|
43
|
+
:description => "The data to be populated into a file",
|
44
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:data] = f }
|
45
|
+
|
46
|
+
option :path,
|
47
|
+
:short => "-P PATH",
|
48
|
+
:long => "--path PATH",
|
49
|
+
:description => "The HDFS path - Directory or File to create",
|
50
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:path] = f }
|
51
|
+
|
52
|
+
option :overwrite,
|
53
|
+
:short => "-O OVERWRITE",
|
54
|
+
:long => "--overwrite OVERWRITE",
|
55
|
+
:description => "The overwrite bolean <true,fale>",
|
56
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:overwrite] = f }
|
57
|
+
|
58
|
+
option :blocksize,
|
59
|
+
:short => "-B BLOCKSIZE",
|
60
|
+
:long => "--blocksize BLOCKSIZE",
|
61
|
+
:description => "The blocksize",
|
62
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:blocksize] = f }
|
63
|
+
|
64
|
+
option :replication,
|
65
|
+
:short => "-R REPLICATION",
|
66
|
+
:long => "--replication REPLICATION",
|
67
|
+
:description => "The replication factor <n>",
|
68
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:replication] = f }
|
69
|
+
|
70
|
+
option :permission,
|
71
|
+
:short => "-P PERM",
|
72
|
+
:long => "--permission PERMISSION",
|
73
|
+
:description => "The permissions of the directory",
|
74
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:permission] = f }
|
75
|
+
|
76
|
+
|
77
|
+
def run
|
78
|
+
$stdout.sync = true
|
79
|
+
|
80
|
+
type = "#{Chef::Config[:knife][:type]}".downcase
|
81
|
+
case type
|
82
|
+
when 'dir'
|
83
|
+
hdfs_connection.mkdir("#{Chef::Config[:knife][:path]}", :permission => "#{Chef::Config[:knife][:permission]}")
|
84
|
+
when 'file'
|
85
|
+
; debugger
|
86
|
+
hdfs_connection.create("#{Chef::Config[:knife][:path]}", "#{Chef::Config[:knife][:data]}",
|
87
|
+
:overwrite => "#{Chef::Config[:knife][:overwrite]}", :blocksize => "#{Chef::Config[:knife][:blocksize]}",
|
88
|
+
:replication => "#{Chef::Config[:knife][:replication]}", :permission => "#{Chef::Config[:knife][:permission]}"
|
89
|
+
)
|
90
|
+
else
|
91
|
+
ui.error ("Incorrect options. Please use --help to list options.")
|
92
|
+
end
|
93
|
+
end
|
94
|
+
end
|
95
|
+
end
|
96
|
+
end
|
97
|
+
|
@@ -0,0 +1,93 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
|
20
|
+
class Knife
|
21
|
+
class HadoopHdfsList < Knife
|
22
|
+
|
23
|
+
include Knife::HadoopBase
|
24
|
+
|
25
|
+
deps do
|
26
|
+
require 'readline'
|
27
|
+
require 'chef/json_compat'
|
28
|
+
require 'chef/knife/bootstrap'
|
29
|
+
Chef::Knife::Bootstrap.load_deps
|
30
|
+
end
|
31
|
+
|
32
|
+
banner "knife hadoop hdfs list (options)"
|
33
|
+
|
34
|
+
option :dir,
|
35
|
+
:short => "-D DIR",
|
36
|
+
:long => "--directory DIRECTORY",
|
37
|
+
:description => "The HDFS directory",
|
38
|
+
:proc => Proc.new { |f| Chef::Config[:knife][:dir] = f }
|
39
|
+
|
40
|
+
def run
|
41
|
+
$stdout.sync = true
|
42
|
+
|
43
|
+
hdfs_list = [
|
44
|
+
ui.color('Directory', :bold),
|
45
|
+
ui.color('accessTime', :bold),
|
46
|
+
ui.color('blockSize', :bold),
|
47
|
+
ui.color('group', :bold),
|
48
|
+
ui.color('length', :bold),
|
49
|
+
ui.color('modificationTime', :bold),
|
50
|
+
ui.color('owner', :bold),
|
51
|
+
ui.color('pathSuffix', :bold),
|
52
|
+
ui.color('permission', :bold),
|
53
|
+
ui.color('replication', :bold),
|
54
|
+
ui.color('type', :bold)
|
55
|
+
]
|
56
|
+
|
57
|
+
#There has to be a more elegant way to do the below iteration :-)
|
58
|
+
|
59
|
+
hdfs_layout = hdfs_connection.list("#{Chef::Config[:knife][:dir]}")
|
60
|
+
hdfs_layout.each do |item|
|
61
|
+
hdfs_list << "#{Chef::Config[:knife][:dir]}"
|
62
|
+
item.each do |k, v|
|
63
|
+
if "#{k}" == 'accessTime'
|
64
|
+
hdfs_list << item['accessTime'].to_s
|
65
|
+
elsif "#{k}" == 'blockSize'
|
66
|
+
hdfs_list << item['blockSize'].to_s
|
67
|
+
elsif "#{k}" == 'group'
|
68
|
+
hdfs_list << item['group'].to_s
|
69
|
+
elsif "#{k}" == 'length'
|
70
|
+
hdfs_list << item['length'].to_s
|
71
|
+
elsif "#{k}" == 'modificationTime'
|
72
|
+
hdfs_list << item['modificationTime'].to_s
|
73
|
+
elsif "#{k}" == 'owner'
|
74
|
+
hdfs_list << item['owner'].to_s
|
75
|
+
elsif "#{k}" == 'pathSuffix'
|
76
|
+
hdfs_list << item['pathSuffix'].to_s
|
77
|
+
elsif "#{k}" == 'permission'
|
78
|
+
hdfs_list << item['permission'].to_s
|
79
|
+
elsif "#{k}" == 'replication'
|
80
|
+
hdfs_list << item['replication'].to_s
|
81
|
+
elsif "#{k}" == 'type'
|
82
|
+
hdfs_list << item['type'].to_s
|
83
|
+
else
|
84
|
+
ui.error ("WebHDFS is not responding. Please debug")
|
85
|
+
end
|
86
|
+
end
|
87
|
+
end
|
88
|
+
puts ui.list(hdfs_list, :uneven_columns_across, 11)
|
89
|
+
end
|
90
|
+
end
|
91
|
+
end
|
92
|
+
end
|
93
|
+
|
@@ -0,0 +1,99 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
  class Knife
    # knife hadoop hdfs snapshot: records the current HDFS listing of a
    # directory into a database table (one row per entry, timestamped), so
    # directory state can be compared over time.
    class HadoopHdfsSnapshot < Knife

      include Knife::HadoopBase

      deps do
        require 'readline'
        require 'chef/json_compat'
        require 'chef/knife/bootstrap'
        Chef::Knife::Bootstrap.load_deps
      end

      banner "knife hadoop hdfs snapshot (options)"

      option :dir,
        :short => "-D DIR",
        :long => "--directory DIRECTORY",
        :description => "The HDFS directory",
        :proc => Proc.new { |f| Chef::Config[:knife][:dir] = f }

      option :table,
        :short => "-X TABLE",
        :long => "--database-table TABLE",
        :description => "The database table to be used",
        :proc => Proc.new { |f| Chef::Config[:knife][:table] = f }

      # FileStatus fields captured for each directory entry. Matches the
      # column set created by `knife hadoop setup -T db`.
      SNAPSHOT_FIELDS = %w[accessTime blockSize group length modificationTime
                           owner pathSuffix permission replication type]

      # Fetches the directory listing over WebHDFS and inserts one row per
      # entry into the configured table. Each insert runs in its own
      # transaction, as before.
      def run
        $stdout.sync = true
        dataset = db_connection.from("#{Chef::Config[:knife][:table]}")
        list = hdfs_connection.list("#{Chef::Config[:knife][:dir]}")

        list.each do |item|
          # Build the row from the fixed field list. The old per-key
          # if/elsif chain printed "Cannot read key value pairs" for any
          # unexpected key and let stale instance vars leak between items
          # when a field was absent; this loop avoids both.
          row = { :directory => "#{Chef::Config[:knife][:dir]}" }
          SNAPSHOT_FIELDS.each { |f| row[f.to_sym] = "#{item[f]}" }
          row[:created_at] = @created_at = Time.now

          db_connection.transaction do
            dataset.insert(row)
          end
        end

        ui.msg ("Created snapshot in #{Chef::Config[:knife][:table]} at #{@created_at}")
      end
    end
  end
end
|
99
|
+
|
@@ -0,0 +1,57 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
  class Knife
    # knife hadoop hdfs update: renames (moves) a path inside HDFS using
    # the WebHDFS RENAME operation.
    class HadoopHdfsUpdate < Knife

      include Knife::HadoopBase

      deps do
        require 'readline'
        require 'chef/json_compat'
        require 'chef/knife/bootstrap'
        Chef::Knife::Bootstrap.load_deps
      end

      banner "knife hadoop hdfs update (options)"

      option :path,
        :short => "-O ORIGINALPATH",
        :long => "--original-path ORIGINALPATH",
        :description => "The original HDFS path",
        :proc => Proc.new { |f| Chef::Config[:knife][:path] = f }

      option :newpath,
        :short => "-N NEWPATH",
        :long => "--new-path NEWPATH",
        :description => "The new HDFS path",
        :proc => Proc.new { |f| Chef::Config[:knife][:newpath] = f }

      # Renames the configured original path to the new path.
      def run
        $stdout.sync = true

        source = "#{Chef::Config[:knife][:path]}"
        destination = "#{Chef::Config[:knife][:newpath]}"
        hdfs_connection.rename(source, destination)
      end
    end
  end
end
|
57
|
+
|
@@ -0,0 +1,69 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
  class Knife
    # knife hadoop mapred job kill: kills a MapReduce job, selected either
    # by job id or by job name, through the MapReduce management REST
    # service configured via mapred_mgmt_host/mapred_mgmt_port.
    class HadoopMapredJobKill < Knife

      include Knife::HadoopBase

      deps do
        require 'readline'
        require 'chef/json_compat'
        require 'chef/knife/bootstrap'
        Chef::Knife::Bootstrap.load_deps
      end

      banner "knife hadoop mapred job kill (options)"

      option :filter,
        :short => "-F FILTER",
        :long => "--filter FILTER",
        :description => "Kill Job by either <id,name>",
        :proc => Proc.new { |f| Chef::Config[:knife][:filter] = f }

      option :jobid,
        :short => "-J JOBID",
        :long => "--job-id JOBID",
        :description => "The MapReduce JobID",
        :proc => Proc.new { |f| Chef::Config[:knife][:jobid] = f }

      option :jobname,
        :short => "-N JOBNAME",
        :long => "--job-name JOBNAME",
        :description => "The MapReduce JobNAME",
        :proc => Proc.new { |f| Chef::Config[:knife][:jobname] = f }

      # Issues a DELETE against /job/kill/{id,name}/<value> on the
      # management service, depending on the -F filter.
      def run
        $stdout.sync = true

        # Fixed: these debug lines used to be mislabelled "username:" and
        # "password:" although they log the management host and port.
        Chef::Log.debug("mapred management host: #{Chef::Config[:knife][:mapred_mgmt_host]}")
        Chef::Log.debug("mapred management port: #{Chef::Config[:knife][:mapred_mgmt_port]}")

        base_url = "http://#{Chef::Config[:knife][:mapred_mgmt_host]}:#{Chef::Config[:knife][:mapred_mgmt_port]}"

        filter = "#{Chef::Config[:knife][:filter]}".downcase
        case filter
        when 'id'
          RestClient.delete "#{base_url}/job/kill/id/#{Chef::Config[:knife][:jobid]}"
        when 'name'
          RestClient.delete "#{base_url}/job/kill/name/#{Chef::Config[:knife][:jobname]}"
        else
          # Previously an unrecognized filter silently did nothing.
          ui.error("Unknown filter '#{filter}'. Use -F id or -F name.")
        end
      end
    end
  end
end
|
69
|
+
|
@@ -0,0 +1,102 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
  class Knife
    # knife hadoop mapred job list: lists MapReduce jobs from the
    # management REST service, either all jobs or only those owned by a
    # given user.
    class HadoopMapredJobList < Knife

      include Knife::HadoopBase

      deps do
        require 'readline'
        require 'chef/json_compat'
        require 'chef/knife/bootstrap'
        Chef::Knife::Bootstrap.load_deps
      end

      banner "knife hadoop mapred job list (options)"

      option :filter,
        :short => "-F FILTER",
        :long => "--filter FILTER",
        :description => "List MapReduce jobs <all,user>",
        :proc => Proc.new { |f| Chef::Config[:knife][:filter] = f }

      option :name,
        :short => "-N NAME",
        :long => "--name NAME",
        :description => "List by User Name",
        :proc => Proc.new { |f| Chef::Config[:knife][:name] = f }

      # Job attributes shown per row, in header order.
      JOB_FIELDS = %w[jobid mapComplete name priority reduceComplete
                      schedulingInfo startTime state user]

      # Fetches /job/list from the management service and prints a table
      # of jobs; with -F user, only jobs whose 'user' matches --name.
      def run
        $stdout.sync = true

        # Fixed: these debug lines used to be mislabelled "username:" and
        # "password:" although they log the management host and port.
        Chef::Log.debug("mapred management host: #{Chef::Config[:knife][:mapred_mgmt_host]}")
        Chef::Log.debug("mapred management port: #{Chef::Config[:knife][:mapred_mgmt_port]}")

        job_list = JOB_FIELDS.map { |f| ui.color(f, :bold) }

        filter = "#{Chef::Config[:knife][:filter]}".downcase
        case filter
        when 'all', 'user'
          # Both filters fetch the same listing; 'user' additionally
          # restricts rows to the named user. The old code duplicated the
          # nine append lines in each branch.
          response = RestClient.get "http://#{Chef::Config[:knife][:mapred_mgmt_host]}:#{Chef::Config[:knife][:mapred_mgmt_port]}/job/list"
          collection = JSON.parse(response)
          collection.each do |item|
            next if filter == 'user' && "#{Chef::Config[:knife][:name]}" != item['user']
            JOB_FIELDS.each { |f| job_list << item[f] }
          end
        else
          # Previously an unrecognized filter silently printed an empty table.
          ui.error("Unknown filter '#{filter}'. Use -F all or -F user.")
        end

        puts ui.list(job_list, :uneven_columns_across, 9)
      end
    end
  end
end
|
102
|
+
|
@@ -0,0 +1,73 @@
|
|
1
|
+
# Author:: Murali Raju (<murali.raju@appliv.com>)
|
2
|
+
# Copyright:: Copyright (c) 2012 Murali Raju.
|
3
|
+
# License:: Apache License, Version 2.0
|
4
|
+
#
|
5
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
6
|
+
# you may not use this file except in compliance with the License.
|
7
|
+
# You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing, software
|
12
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
13
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
14
|
+
# See the License for the specific language governing permissions and
|
15
|
+
# limitations under the License.
|
16
|
+
#
|
17
|
+
require 'chef/knife/hadoop_base'
|
18
|
+
|
19
|
+
class Chef
  class Knife
    # knife hadoop setup: one-time setup tasks for this plugin. Currently
    # only the 'db' type is implemented: it creates the snapshot table
    # whose columns match the fields written by `knife hadoop hdfs snapshot`.
    class HadoopSetup < Knife

      include Knife::HadoopBase

      deps do
        require 'readline'
        require 'chef/json_compat'
        require 'chef/knife/bootstrap'
        Chef::Knife::Bootstrap.load_deps
      end

      banner "knife hadoop setup (options)"

      option :type,
        :short => "-T TYPE",
        :long => "--setup-type SETUP TYPE",
        :description => "The setup type <db,cluster>",
        :proc => Proc.new { |f| Chef::Config[:knife][:type] = f }

      option :table,
        :short => "-X TABLE",
        :long => "--database-table TABLE",
        :description => "The database table to be created",
        :proc => Proc.new { |f| Chef::Config[:knife][:table] = f }

      # Dispatches on -T: 'db' creates the snapshot table; anything else
      # is reported instead of being silently ignored (the option text
      # advertises 'cluster', but no cluster setup is implemented here).
      def run
        $stdout.sync = true

        type = "#{Chef::Config[:knife][:type]}".downcase
        case type
        when 'db'
          ui.msg("Creating table #{Chef::Config[:knife][:table]}")
          db_connection.create_table "#{Chef::Config[:knife][:table]}" do
            String :directory
            String :accessTime
            String :blockSize
            String :group
            String :length
            String :modificationTime
            String :owner
            String :pathSuffix
            String :permission
            String :replication
            String :type
            DateTime :created_at
          end
        else
          ui.error("Setup type '#{type}' is not supported (only 'db' is implemented).")
        end
      end
    end
  end
end
|
73
|
+
|
metadata
ADDED
@@ -0,0 +1,159 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: knife-hadoop
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.4
|
5
|
+
prerelease:
|
6
|
+
platform: ruby
|
7
|
+
authors:
|
8
|
+
- Murali Raju
|
9
|
+
autorequire:
|
10
|
+
bindir: bin
|
11
|
+
cert_chain: []
|
12
|
+
date: 2013-02-21 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: webhdfs
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
none: false
|
18
|
+
requirements:
|
19
|
+
- - ! '>='
|
20
|
+
- !ruby/object:Gem::Version
|
21
|
+
version: '0'
|
22
|
+
type: :runtime
|
23
|
+
prerelease: false
|
24
|
+
version_requirements: !ruby/object:Gem::Requirement
|
25
|
+
none: false
|
26
|
+
requirements:
|
27
|
+
- - ! '>='
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
version: '0'
|
30
|
+
- !ruby/object:Gem::Dependency
|
31
|
+
name: pg
|
32
|
+
requirement: !ruby/object:Gem::Requirement
|
33
|
+
none: false
|
34
|
+
requirements:
|
35
|
+
- - ! '>='
|
36
|
+
- !ruby/object:Gem::Version
|
37
|
+
version: '0'
|
38
|
+
type: :runtime
|
39
|
+
prerelease: false
|
40
|
+
version_requirements: !ruby/object:Gem::Requirement
|
41
|
+
none: false
|
42
|
+
requirements:
|
43
|
+
- - ! '>='
|
44
|
+
- !ruby/object:Gem::Version
|
45
|
+
version: '0'
|
46
|
+
- !ruby/object:Gem::Dependency
|
47
|
+
name: sequel
|
48
|
+
requirement: !ruby/object:Gem::Requirement
|
49
|
+
none: false
|
50
|
+
requirements:
|
51
|
+
- - ! '>='
|
52
|
+
- !ruby/object:Gem::Version
|
53
|
+
version: '0'
|
54
|
+
type: :runtime
|
55
|
+
prerelease: false
|
56
|
+
version_requirements: !ruby/object:Gem::Requirement
|
57
|
+
none: false
|
58
|
+
requirements:
|
59
|
+
- - ! '>='
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
- !ruby/object:Gem::Dependency
|
63
|
+
name: debugger
|
64
|
+
requirement: !ruby/object:Gem::Requirement
|
65
|
+
none: false
|
66
|
+
requirements:
|
67
|
+
- - ! '>='
|
68
|
+
- !ruby/object:Gem::Version
|
69
|
+
version: '0'
|
70
|
+
type: :runtime
|
71
|
+
prerelease: false
|
72
|
+
version_requirements: !ruby/object:Gem::Requirement
|
73
|
+
none: false
|
74
|
+
requirements:
|
75
|
+
- - ! '>='
|
76
|
+
- !ruby/object:Gem::Version
|
77
|
+
version: '0'
|
78
|
+
- !ruby/object:Gem::Dependency
|
79
|
+
name: rest-client
|
80
|
+
requirement: !ruby/object:Gem::Requirement
|
81
|
+
none: false
|
82
|
+
requirements:
|
83
|
+
- - ! '>='
|
84
|
+
- !ruby/object:Gem::Version
|
85
|
+
version: '0'
|
86
|
+
type: :runtime
|
87
|
+
prerelease: false
|
88
|
+
version_requirements: !ruby/object:Gem::Requirement
|
89
|
+
none: false
|
90
|
+
requirements:
|
91
|
+
- - ! '>='
|
92
|
+
- !ruby/object:Gem::Version
|
93
|
+
version: '0'
|
94
|
+
- !ruby/object:Gem::Dependency
|
95
|
+
name: chef
|
96
|
+
requirement: !ruby/object:Gem::Requirement
|
97
|
+
none: false
|
98
|
+
requirements:
|
99
|
+
- - ! '>='
|
100
|
+
- !ruby/object:Gem::Version
|
101
|
+
version: '0'
|
102
|
+
type: :runtime
|
103
|
+
prerelease: false
|
104
|
+
version_requirements: !ruby/object:Gem::Requirement
|
105
|
+
none: false
|
106
|
+
requirements:
|
107
|
+
- - ! '>='
|
108
|
+
- !ruby/object:Gem::Version
|
109
|
+
version: '0'
|
110
|
+
description: Hadoop Chef Knife Plugin
|
111
|
+
email:
|
112
|
+
- murraju@appliv.com
|
113
|
+
executables: []
|
114
|
+
extensions: []
|
115
|
+
extra_rdoc_files:
|
116
|
+
- README.md
|
117
|
+
- LICENSE
|
118
|
+
files:
|
119
|
+
- Gemfile
|
120
|
+
- Gemfile.lock
|
121
|
+
- LICENSE
|
122
|
+
- README.md
|
123
|
+
- Rakefile
|
124
|
+
- configs/hosfw.json
|
125
|
+
- knife-hadoop.gemspec
|
126
|
+
- lib/chef/knife/hadoop_base.rb
|
127
|
+
- lib/chef/knife/hadoop_hdfs_create.rb
|
128
|
+
- lib/chef/knife/hadoop_hdfs_list.rb
|
129
|
+
- lib/chef/knife/hadoop_hdfs_snapshot.rb
|
130
|
+
- lib/chef/knife/hadoop_hdfs_update.rb
|
131
|
+
- lib/chef/knife/hadoop_mapred_job_kill.rb
|
132
|
+
- lib/chef/knife/hadoop_mapred_job_list.rb
|
133
|
+
- lib/chef/knife/hadoop_setup.rb
|
134
|
+
- lib/knife-hadoop/version.rb
|
135
|
+
homepage: https://github.com/murraju/knife-hadoop
|
136
|
+
licenses: []
|
137
|
+
post_install_message:
|
138
|
+
rdoc_options: []
|
139
|
+
require_paths:
|
140
|
+
- lib
|
141
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
142
|
+
none: false
|
143
|
+
requirements:
|
144
|
+
- - ! '>='
|
145
|
+
- !ruby/object:Gem::Version
|
146
|
+
version: '0'
|
147
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
148
|
+
none: false
|
149
|
+
requirements:
|
150
|
+
- - ! '>='
|
151
|
+
- !ruby/object:Gem::Version
|
152
|
+
version: '0'
|
153
|
+
requirements: []
|
154
|
+
rubyforge_project:
|
155
|
+
rubygems_version: 1.8.23
|
156
|
+
signing_key:
|
157
|
+
specification_version: 3
|
158
|
+
summary: Hadoop Chef Knife Plugin
|
159
|
+
test_files: []
|