wukong 0.1.1

Files changed (143)
  1. data/LICENSE.textile +107 -0
  2. data/README.textile +166 -0
  3. data/bin/cutc +30 -0
  4. data/bin/cuttab +5 -0
  5. data/bin/greptrue +8 -0
  6. data/bin/hdp-cat +3 -0
  7. data/bin/hdp-catd +3 -0
  8. data/bin/hdp-du +81 -0
  9. data/bin/hdp-get +3 -0
  10. data/bin/hdp-kill +3 -0
  11. data/bin/hdp-ls +10 -0
  12. data/bin/hdp-mkdir +3 -0
  13. data/bin/hdp-mv +3 -0
  14. data/bin/hdp-parts_to_keys.rb +77 -0
  15. data/bin/hdp-ps +3 -0
  16. data/bin/hdp-put +3 -0
  17. data/bin/hdp-rm +11 -0
  18. data/bin/hdp-sort +29 -0
  19. data/bin/hdp-stream +29 -0
  20. data/bin/hdp-stream-flat +18 -0
  21. data/bin/hdp-sync +17 -0
  22. data/bin/hdp-wc +67 -0
  23. data/bin/md5sort +20 -0
  24. data/bin/tabchar +5 -0
  25. data/bin/uniqc +3 -0
  26. data/bin/wu-hist +3 -0
  27. data/bin/wu-lign +177 -0
  28. data/bin/wu-sum +30 -0
  29. data/doc/INSTALL.textile +41 -0
  30. data/doc/LICENSE.textile +107 -0
  31. data/doc/README-tutorial.textile +163 -0
  32. data/doc/README-wulign.textile +59 -0
  33. data/doc/README-wutils.textile +128 -0
  34. data/doc/TODO.textile +61 -0
  35. data/doc/UsingWukong-part1-setup.textile +2 -0
  36. data/doc/UsingWukong-part2-scraping.textile +2 -0
  37. data/doc/UsingWukong-part3-parsing.textile +132 -0
  38. data/doc/code/api_response_example.txt +20 -0
  39. data/doc/code/parser_skeleton.rb +38 -0
  40. data/doc/hadoop-nfs.textile +51 -0
  41. data/doc/hadoop-setup.textile +29 -0
  42. data/doc/index.textile +124 -0
  43. data/doc/intro_to_map_reduce/MapReduceDiagram.graffle +0 -0
  44. data/doc/links.textile +42 -0
  45. data/doc/overview.textile +91 -0
  46. data/doc/pig/PigLatinExpressionsList.txt +122 -0
  47. data/doc/pig/PigLatinReferenceManual.html +19134 -0
  48. data/doc/pig/PigLatinReferenceManual.txt +1640 -0
  49. data/doc/tips.textile +116 -0
  50. data/doc/usage.textile +102 -0
  51. data/doc/utils.textile +48 -0
  52. data/examples/README.txt +17 -0
  53. data/examples/and_pig/sample_queries.rb +128 -0
  54. data/examples/apache_log_parser.rb +53 -0
  55. data/examples/count_keys.rb +56 -0
  56. data/examples/count_keys_at_mapper.rb +57 -0
  57. data/examples/graph/adjacency_list.rb +74 -0
  58. data/examples/graph/breadth_first_search.rb +79 -0
  59. data/examples/graph/gen_2paths.rb +68 -0
  60. data/examples/graph/gen_multi_edge.rb +103 -0
  61. data/examples/graph/gen_symmetric_links.rb +53 -0
  62. data/examples/package-local.rb +100 -0
  63. data/examples/package.rb +96 -0
  64. data/examples/pagerank/README.textile +6 -0
  65. data/examples/pagerank/gen_initial_pagerank_graph.pig +57 -0
  66. data/examples/pagerank/pagerank.rb +88 -0
  67. data/examples/pagerank/pagerank_initialize.rb +46 -0
  68. data/examples/pagerank/run_pagerank.sh +19 -0
  69. data/examples/rank_and_bin.rb +173 -0
  70. data/examples/run_all.sh +47 -0
  71. data/examples/sample_records.rb +44 -0
  72. data/examples/size.rb +60 -0
  73. data/examples/word_count.rb +95 -0
  74. data/lib/wukong.rb +11 -0
  75. data/lib/wukong/and_pig.rb +62 -0
  76. data/lib/wukong/and_pig/README.textile +12 -0
  77. data/lib/wukong/and_pig/as.rb +37 -0
  78. data/lib/wukong/and_pig/data_types.rb +30 -0
  79. data/lib/wukong/and_pig/functions.rb +50 -0
  80. data/lib/wukong/and_pig/generate.rb +85 -0
  81. data/lib/wukong/and_pig/generate/variable_inflections.rb +82 -0
  82. data/lib/wukong/and_pig/junk.rb +51 -0
  83. data/lib/wukong/and_pig/operators.rb +8 -0
  84. data/lib/wukong/and_pig/operators/compound.rb +29 -0
  85. data/lib/wukong/and_pig/operators/evaluators.rb +7 -0
  86. data/lib/wukong/and_pig/operators/execution.rb +15 -0
  87. data/lib/wukong/and_pig/operators/file_methods.rb +29 -0
  88. data/lib/wukong/and_pig/operators/foreach.rb +98 -0
  89. data/lib/wukong/and_pig/operators/groupies.rb +212 -0
  90. data/lib/wukong/and_pig/operators/load_store.rb +65 -0
  91. data/lib/wukong/and_pig/operators/meta.rb +42 -0
  92. data/lib/wukong/and_pig/operators/relational.rb +129 -0
  93. data/lib/wukong/and_pig/pig_struct.rb +48 -0
  94. data/lib/wukong/and_pig/pig_var.rb +95 -0
  95. data/lib/wukong/and_pig/symbol.rb +29 -0
  96. data/lib/wukong/and_pig/utils.rb +0 -0
  97. data/lib/wukong/bad_record.rb +18 -0
  98. data/lib/wukong/boot.rb +47 -0
  99. data/lib/wukong/datatypes.rb +24 -0
  100. data/lib/wukong/datatypes/enum.rb +123 -0
  101. data/lib/wukong/dfs.rb +80 -0
  102. data/lib/wukong/encoding.rb +111 -0
  103. data/lib/wukong/extensions.rb +15 -0
  104. data/lib/wukong/extensions/array.rb +18 -0
  105. data/lib/wukong/extensions/blank.rb +93 -0
  106. data/lib/wukong/extensions/class.rb +189 -0
  107. data/lib/wukong/extensions/date_time.rb +24 -0
  108. data/lib/wukong/extensions/emittable.rb +82 -0
  109. data/lib/wukong/extensions/hash.rb +120 -0
  110. data/lib/wukong/extensions/hash_like.rb +119 -0
  111. data/lib/wukong/extensions/hashlike_class.rb +47 -0
  112. data/lib/wukong/extensions/module.rb +2 -0
  113. data/lib/wukong/extensions/pathname.rb +27 -0
  114. data/lib/wukong/extensions/string.rb +65 -0
  115. data/lib/wukong/extensions/struct.rb +17 -0
  116. data/lib/wukong/extensions/symbol.rb +11 -0
  117. data/lib/wukong/logger.rb +53 -0
  118. data/lib/wukong/models/graph.rb +27 -0
  119. data/lib/wukong/rdf.rb +104 -0
  120. data/lib/wukong/schema.rb +37 -0
  121. data/lib/wukong/script.rb +265 -0
  122. data/lib/wukong/script/hadoop_command.rb +111 -0
  123. data/lib/wukong/script/local_command.rb +14 -0
  124. data/lib/wukong/streamer.rb +13 -0
  125. data/lib/wukong/streamer/accumulating_reducer.rb +89 -0
  126. data/lib/wukong/streamer/base.rb +76 -0
  127. data/lib/wukong/streamer/count_keys.rb +30 -0
  128. data/lib/wukong/streamer/count_lines.rb +26 -0
  129. data/lib/wukong/streamer/filter.rb +20 -0
  130. data/lib/wukong/streamer/line_streamer.rb +12 -0
  131. data/lib/wukong/streamer/list_reducer.rb +20 -0
  132. data/lib/wukong/streamer/preprocess_with_pipe_streamer.rb +22 -0
  133. data/lib/wukong/streamer/rank_and_bin_reducer.rb +145 -0
  134. data/lib/wukong/streamer/set_reducer.rb +14 -0
  135. data/lib/wukong/streamer/struct_streamer.rb +48 -0
  136. data/lib/wukong/streamer/summing_reducer.rb +29 -0
  137. data/lib/wukong/streamer/uniq_by_last_reducer.rb +44 -0
  138. data/lib/wukong/typed_struct.rb +12 -0
  139. data/lib/wukong/wukong_class.rb +21 -0
  140. data/spec/bin/hdp-wc_spec.rb +4 -0
  141. data/spec/spec_helper.rb +0 -0
  142. data/wukong.gemspec +179 -0
  143. metadata +214 -0
data/LICENSE.textile ADDED
@@ -0,0 +1,107 @@
+ ---
+ layout: default
+ title: Apache License
+ ---
+
+
+ h1(gemheader). {{ site.gemname }} %(small):: license%
+
+
+ The wukong code is __Copyright (c) 2009 Philip (flip) Kromer__
+
+ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an **AS IS** BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
+ h1. Apache License
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ <notextile><div class="toggle"></notextile>
+
+ h2. 1. Definitions.
+
+ * **License** shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+ * **Licensor** shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+ * **Legal Entity** shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, **control** means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+ * **You** (or **Your**) shall mean an individual or Legal Entity exercising permissions granted by this License.
+ * **Source** form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+ * **Object** form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+ * **Work** shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+ * **Derivative Works** shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+ * **Contribution** shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, **submitted** means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+ * **Contributor** shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 2. Grant of Copyright License.
+
+ Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 3. Grant of Patent License.
+
+ Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 4. Redistribution.
+
+ You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+ # You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ # You must cause any modified files to carry prominent notices stating that You changed the files; and
+ # You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ # If the Work includes a __NOTICE__ text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 5. Submission of Contributions.
+
+ Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 6. Trademarks.
+
+ This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 7. Disclaimer of Warranty.
+
+ Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an **AS IS** BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 8. Limitation of Liability.
+
+ In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+ <notextile></div><div class="toggle"></notextile>
+
+ h2. 9. Accepting Warranty or Additional Liability.
+
+ While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ <notextile></div><div class="toggle"></notextile>
+
+ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an **AS IS** BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
+ <notextile></div></notextile>
data/README.textile ADDED
@@ -0,0 +1,166 @@
+ h1. Wukong
+
+ Wukong makes "Hadoop":http://hadoop.apache.org/core so easy a chimpanzee can use it.
+
+ Treat your dataset as a
+
+ * stream of lines when it's efficient to process by lines
+ * stream of field arrays when it's efficient to deal directly with fields
+ * stream of lightweight objects when it's efficient to deal with objects
+
+ Wukong is friends with "Hadoop":http://hadoop.apache.org/core the elephant, "Pig":http://hadoop.apache.org/pig/ the query language, and the @cat@ on your command line.
+
+ The main documentation -- including tutorials and tips for working with big data -- lives on the "Wukong Pages":http://mrflip.github.com/wukong and there is some supplemental information on the "wukong wiki.":http://wiki.github.com/mrflip/wukong
+
+ h2. How to write a Wukong script
+
+ Here's a script to count words in a text stream:
+
+ <pre><code> require 'wukong'
+ module WordCount
+ class Mapper < Wukong::Streamer::LineStreamer
+ # Emit each word in the line.
+ def process line
+ words = line.strip.split(/\W+/).reject(&:blank?)
+ words.each{|word| yield [word, 1] }
+ end
+ end
+
+ class Reducer < Wukong::Streamer::ListReducer
+ def finalize
+ yield [ key, values.map(&:last).map(&:to_i).sum ]
+ end
+ end
+ end
+
+ Wukong::Script.new(
+ WordCount::Mapper,
+ WordCount::Reducer
+ ).run # Execute the script
+ </code></pre>
+
+ The first class, the Mapper, eats lines and craps @[word, count]@ records: word is the /key/, its count is the /value/.
+
+ In the reducer, the values for each key are stacked up into a list; then the record(s) yielded by @#finalize@ are emitted. There are many other ways to write the reducer (most of them are better) -- see the ["examples":examples/]
+
+ h3. Structured data stream
+
+ You can also use structs to treat your dataset as a stream of objects:
+
+ <pre><code> require 'wukong'
+ require 'my_blog' #defines the blog models
+ # structs for our input objects
+ Tweet = Struct.new( :id, :created_at, :twitter_user_id,
+ :in_reply_to_user_id, :in_reply_to_status_id, :text )
+ TwitterUser = Struct.new( :id, :username, :fullname,
+ :homepage, :location, :description )
+ module TwitBlog
+ class Mapper < Wukong::Streamer::RecordStreamer
+ # Watch for tweets by me
+ MY_USER_ID = 24601
+ #
+ # If this is a tweet is by me, convert it to a Post.
+ #
+ # If it is a tweet not by me, convert it to a Comment that
+ # will be paired with the correct Post.
+ #
+ # If it is a TwitterUser, convert it to a User record and
+ # a user_location record
+ #
+ def process record
+ case record
+ when TwitterUser
+ user = MyBlog::User.new.merge(record) # grab the fields in common
+ user_loc = MyBlog::UserLoc.new(record.id, record.location, nil, nil)
+ yield user
+ yield user_loc
+ when Tweet
+ if record.twitter_user_id == MY_USER_ID
+ post = MyBlog::Post.new.merge record
+ post.link = "http://twitter.com/statuses/show/#{record.id}"
+ post.body = record.text
+ post.title = record.text[0..65] + "..."
+ yield post
+ else
+ comment = MyBlog::Comment.new.merge record
+ comment.body = record.text
+ comment.post_id = record.in_reply_to_status_id
+ yield comment
+ end
+ end
+ end
+ end
+ end
+ Wukong::Script.new( TwitBlog::Mapper, nil ).run # identity reducer
+ </code></pre>
+
+ h3. More info
+
+ There are many useful examples (including an actually-useful version of the WordCount script) in examples/ directory.
+
+ h2. Setup
+
+ 1. Allow Wukong to discover where his elephant friend lives: either
+
+ * set a @$HADOOP_HOME@ environment variable,
+
+ * or create a file 'config/wukong-site.yaml' with a line that points to the top-level directory of your hadoop install:
+
+ @:hadoop_home: /usr/local/share/hadoop@
+
+ 2. Add wukong's @bin/@ directory to your $PATH, so that you may use its filesystem shortcuts.
+
+
+ h2. How to run a Wukong script
+
+ To run your script using local files and no connection to a hadoop cluster,
+
+ @your/script.rb --run=local path/to/input_files path/to/output_dir@
+
+ To run the command across a Hadoop cluster,
+
+ @your/script.rb --run=hadoop path/to/input_files path/to/output_dir@
+
+ You can set the default in the config/wukong-site.yaml file, and then just use @--run@ instead of @--run=something@ --it will just use the default run mode.
+
+ If you're running @--run=hadoop@, all file paths are HDFS paths. If you're running @--run=local@, all file paths are local paths. (your/script path, of course, lives on the local filesystem).
+
+ You can supply arbitrary command line arguments (they wind up as key-value pairs in the options path your mapper and reducer receive), and you can use the hadoop syntax to specify more than one input file:
+
+ ./path/to/your/script.rb --any_specific_options --options=can_have_vals \
+ --run "input_dir/part_*,input_file2.tsv,etc.tsv" path/to/output_dir
+
+ Note that all @--options@ must precede (in any order) all non-options.
+
+ h2. How to test your scripts
+
+ To run mapper on its own:
+
+ cat ./local/test/input.tsv | ./examples/word_count.rb --map | more
+
+ or if your test data lies on the HDFS,
+
+ hdp-cat test/input.tsv | ./examples/word_count.rb --map | more
+
+ Next graduate to running @--run=local@ mode so you can inspect the reducer.
+
+
+ h2. What's up with Wukong::AndPig?
+
+ @Wukong::AndPig@ is a small library to more easily generate code for the
+ "Pig":http://hadoop.apache.org/pig data analysis language. See its
+ "README":wukong/and_pig/README.textile for more.
+
+ h2. Why is it called Wukong?
+
+ Hadoop, as you may know, is "named after a stuffed elephant.":http://en.wikipedia.org/wiki/Hadoop Since Wukong was started by the "infochimps":http://infochimps.org team, we needed a simian analog. A Monkey King who journeyed to the land of the Elephant seems to fit the bill:
+
+ bq. Sun Wukong (孙悟空), known in the West as the Monkey King, is the main character in the classical Chinese epic novel Journey to the West. In the novel, he accompanies the monk Xuanzang on the journey to retrieve Buddhist sutras from India.
+
+ bq. Sun Wukong possesses incredible strength, being able to lift his 13,500 jīn (8,100 kg) Ruyi Jingu Bang with ease. He also has superb speed, traveling 108,000 li (54,000 kilometers) in one somersault. Sun knows 72 transformations, which allows him to transform into various animals and objects; he is, however, shown with slight problems transforming into other people, since he is unable to complete the transformation of his tail. He is a skilled fighter, capable of holding his own against the best generals of heaven. Each of his hairs possesses magical properties, and is capable of transforming into a clone of the Monkey King himself, or various weapons, animals, and other objects. He also knows various spells in order to command wind, part water, conjure protective circles against demons, freeze humans, demons, and gods alike. -- ["Sun Wukong's Wikipedia entry":http://en.wikipedia.org/wiki/Wukong]
+
+ The "Jaime Hewlett / Damon Albarn short":http://news.bbc.co.uk/sport1/hi/olympics/monkey that the BBC made for their 2008 Olympics coverage gives the general idea.
+
+ h2. What tools does Wukong work with?
+
+ Wukong is friends with "Hadoop":http://hadoop.apache.org/core the elephant, "Pig":http://hadoop.apache.org/pig/ the query language, and the @cat@ on your command line. We're looking forward to being friends with "martinis":http://datamapper.org and "express trains":http://wiki.rubyonrails.org/rails/pages/ActiveRecord down the road.
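
The README's word-count example buffers every @[word, count]@ pair for a key in memory before summing, which is why it notes that better reducers exist. As a rough sketch only -- the @start!@/@accumulate@/@finalize@ hook names below are an assumption based on the AccumulatingReducer class in this gem's file list, not verified against its source -- a streaming sum that keeps just a running total per key might look like:

<pre><code> require 'wukong'
 module WordCount
   # Sketch: sum counts for each word without keeping all of its values.
   # NOTE: the hook names (start!, accumulate, finalize) are assumed, not
   # taken from data/lib/wukong/streamer/accumulating_reducer.rb itself.
   class StreamingSumReducer < Wukong::Streamer::AccumulatingReducer
     attr_accessor :total
     # called when a new key begins
     def start!(*args)
       self.total = 0
     end
     # called once for each [word, count] record sharing the current key
     def accumulate word, count
       self.total += count.to_i
     end
     # called when the key is exhausted; emit [word, total]
     def finalize
       yield [key, total]
     end
   end
 end
</code></pre>

The gem also ships ready-made @data/lib/wukong/streamer/summing_reducer.rb@ and @count_keys.rb@ streamers (see the file list above), which are the places to look for the real interface.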
data/bin/cutc ADDED
@@ -0,0 +1,30 @@
+ #!/usr/bin/env bash
+
+ #
+ # cut 1
+ #
+ # Example:
+ #
+ # A quickie histogram of timestamps; say that for the object in the foo/bar
+ # directory, field 3 holds a flat timestamp (YYYYmmddHHMMSS) and you want a
+ # histogram by hour (and that foo/bar is small enough to be worth sucking
+ # through a single machine):
+ #
+ # hdp-catd foo/bar | cuttab 3 | cutc 12 | sort | uniq -c
+ #
+ # If foo/bar is already sorted leave out the call to sort.
+ #
+
+
+ #
+ # Set it to cut up to $1 (if defined), or if not, up to $CUTC_MAX (if defined), or 200 chars as a fallback.
+ #
+ CUTC_MAX=${CUTC_MAX-200}
+ CUTC_MAX=${1-$CUTC_MAX}
+ cutchars="1-${CUTC_MAX}"
+ shift
+
+ #
+ # Do the cuttin'
+ #
+ cut -c"${cutchars}" "$@"
data/bin/cuttab ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env bash
+
+ fields=${1-"1-"}
+ shift
+ cut -d$'\t' -f"$fields" "$@"
data/bin/greptrue ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env bash
+
+ # runs grep but always returns a true exit status. (Otherwise hadoop vomits)
+ grep "$@"
+ true
+ # runs grep but always returns a true exit status. (Otherwise hadoop vomits)
+ egrep "$@"
+ true
data/bin/hdp-cat ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+
+ hadoop dfs -cat "$@"
data/bin/hdp-catd ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+ args=`echo "$@" | ruby -ne 'a = $_.split(/\s+/); puts a.map{|arg| arg+"/[^_]*" }.join(" ")'`
+ hadoop dfs -cat $args
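
For clarity: the ruby one-liner in @hdp-catd@ just tacks a @/[^_]*@ glob onto each argument before handing the list to @hadoop dfs -cat@, so catting a directory expands to its part files while skipping names that begin with an underscore (log and metadata files). A quick illustration of the same transformation (the paths are made up):

<pre><code> args = %w[foo/bar baz/qux]
 puts args.map{|arg| arg + "/[^_]*" }.join(" ")
 # => foo/bar/[^_]* baz/qux/[^_]*
</code></pre>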
data/bin/hdp-du ADDED
@@ -0,0 +1,81 @@
+ #!/usr/bin/env ruby
+
+ OPTIONS={}
+
+ #
+ # grok options
+ #
+ if ARGV[0] =~ /-[a-z]+/
+ flags = ARGV.shift
+ OPTIONS[:summary] = flags.include?('s')
+ OPTIONS[:humanize] = flags.include?('h')
+ end
+
+ #
+ # Prepare command
+ #
+ def prepare_command
+ dfs_cmd = OPTIONS[:summary] ? 'dus' : 'du'
+ dfs_args = "'" + ARGV.join("' '") + "'"
+ %Q{ hadoop dfs -#{dfs_cmd} #{dfs_args} }
+ end
+
+ Numeric.class_eval do
+ def bytes() self ; end
+ alias :byte :bytes
+ def kilobytes() self * 1024 ; end
+ alias :kilobyte :kilobytes
+ def megabytes() self * 1024.kilobytes ; end
+ alias :megabyte :megabytes
+ def gigabytes() self * 1024.megabytes ; end
+ alias :gigabyte :gigabytes
+ def terabytes() self * 1024.gigabytes ; end
+ alias :terabyte :terabytes
+ def petabytes() self * 1024.terabytes ; end
+ alias :petabyte :petabytes
+ def exabytes() self * 1024.petabytes ; end
+ alias :exabyte :exabytes
+ end
+
+ # Formats the bytes in +size+ into a more understandable representation
+ # (e.g., giving it 1500 yields 1.5 KB). This method is useful for
+ # reporting file sizes to users. This method returns nil if
+ # +size+ cannot be converted into a number. You can change the default
+ # precision of 1 using the precision parameter +precision+.
+ #
+ # ==== Examples
+ # number_to_human_size(123) # => 123 Bytes
+ # number_to_human_size(1234) # => 1.2 KB
+ # number_to_human_size(12345) # => 12.1 KB
+ # number_to_human_size(1234567) # => 1.2 MB
+ # number_to_human_size(1234567890) # => 1.1 GB
+ # number_to_human_size(1234567890123) # => 1.1 TB
+ # number_to_human_size(1234567, 2) # => 1.18 MB
+ # number_to_human_size(483989, 0) # => 4 MB
+ def number_to_human_size(size, precision=1)
+ size = Kernel.Float(size)
+ case
+ when size.to_i == 1; "1 Byte"
+ when size < 1.kilobyte; "%d Bytes" % size
+ when size < 1.megabyte; "%.#{precision}f KB" % (size / 1.0.kilobyte)
+ when size < 1.gigabyte; "%.#{precision}f MB" % (size / 1.0.megabyte)
+ when size < 1.terabyte; "%.#{precision}f GB" % (size / 1.0.gigabyte)
+ else "%.#{precision}f TB" % (size / 1.0.terabyte)
+ end.sub(/([0-9]\.\d*?)0+ /, '\1 ' ).sub(/\. /,' ')
+ rescue
+ nil
+ end
+
+ def format_output file, size
+ human_size = number_to_human_size(size) || 3
+ file = file.gsub(%r{hdfs://[^/]+/}, '/') # kill off hdfs paths, otherwise leave it alone
+ "%-71s\t%15d\t%15s" % [file, size.to_i, human_size]
+ end
+
+
+ %x{ #{prepare_command} }.split("\n").each do |line|
+ if line =~ /^Found \d+ items$/ then puts line ; next end
+ info = line.split(/\s+/)
+ if OPTIONS[:summary] then file, size = info else size, file = info end
+ puts format_output(file, size)
+ end
data/bin/hdp-get ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+
+ hadoop dfs -copyToLocal "$1" "$2"
data/bin/hdp-kill ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+
+ hadoop job -kill "$@"
data/bin/hdp-ls ADDED
@@ -0,0 +1,10 @@
+ #!/usr/bin/env bash
+
+ if [ "$1" == "-r" ] || [ "$1" == "-R" ] ; then
+ shift
+ action=lsr
+ else
+ action=ls
+ fi
+
+ hadoop dfs -$action "$@"
data/bin/hdp-mkdir ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+
+ hadoop dfs -mkdir "$@"
data/bin/hdp-mv ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env bash
+
+ hadoop dfs -mv "$@"