wukong 2.0.2 → 3.0.0.pre

Sign up to get free protection for your applications and to get access to all the features.
Files changed (268)
  1. data/.document +5 -0
  2. data/.gitignore +46 -0
  3. data/.gitmodules +3 -0
  4. data/.rspec +2 -0
  5. data/.travis.yml +12 -0
  6. data/.yardopts +19 -0
  7. data/CHANGELOG.md +7 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +14 -0
  10. data/NOTES-travis.md +31 -0
  11. data/README.md +422 -0
  12. data/Rakefile +12 -0
  13. data/TODO.md +99 -0
  14. data/VERSION +1 -0
  15. data/bin/hdp-cp +0 -0
  16. data/bin/wu-flow +10 -0
  17. data/bin/wu-map +17 -0
  18. data/bin/wu-red +17 -0
  19. data/bin/wukong +17 -0
  20. data/data/CREDITS.md +355 -0
  21. data/data/graph/airfares.tsv +2174 -0
  22. data/data/text/gift_of_the_magi.txt +225 -0
  23. data/data/text/jabberwocky.txt +36 -0
  24. data/data/text/rectification_of_names.txt +33 -0
  25. data/{spec/data → data/twitter}/a_atsigns_b.tsv +0 -0
  26. data/{spec/data → data/twitter}/a_follows_b.tsv +0 -0
  27. data/{spec/data → data/twitter}/tweet.tsv +0 -0
  28. data/{spec/data → data/twitter}/twitter_user.tsv +0 -0
  29. data/data/wikipedia/dbpedia-sentences.tsv +1000 -0
  30. data/examples/dataflow.rb +28 -0
  31. data/examples/{server_logs/logline.rb → dataflow/apache_log_line.rb} +28 -18
  32. data/examples/dataflow/complex.rb +11 -0
  33. data/examples/dataflow/donuts.rb +13 -0
  34. data/examples/dataflow/parse_apache_logs.rb +16 -0
  35. data/examples/dataflow/pig_latinizer.rb +16 -0
  36. data/examples/dataflow/simple.rb +12 -0
  37. data/examples/dataflow/telegram.rb +45 -0
  38. data/examples/examples_helper.rb +9 -0
  39. data/examples/graph/minimum_spanning_tree.rb +73 -0
  40. data/examples/graph/union_find.rb +62 -0
  41. data/examples/text/latinize_text.rb +0 -0
  42. data/examples/text/pig_latin.rb +35 -0
  43. data/examples/tiny_count.rb +8 -0
  44. data/examples/tiny_count/jabberwocky_output.tsv +92 -0
  45. data/examples/twitter/locations.rb +29 -0
  46. data/examples/twitter/models.rb +24 -0
  47. data/examples/twitter/pt1-fiddle.pig +8 -0
  48. data/examples/twitter/pt2-simple_parse.pig +31 -0
  49. data/examples/twitter/pt2-simple_parse.rb +18 -0
  50. data/examples/twitter/pt3-join_on_zips.pig +39 -0
  51. data/examples/twitter/pt4-strong_links.rb +20 -0
  52. data/examples/twitter/pt5-lnglat_and_strong_links.pig +16 -0
  53. data/examples/twitter/states.tsv +50 -0
  54. data/examples/word_count.rb +36 -63
  55. data/examples/workflow/cherry_pie.md +104 -0
  56. data/examples/workflow/cherry_pie.rb +66 -0
  57. data/examples/workflow/fiddle.rb +24 -0
  58. data/examples/workflow/package_gem.rb +55 -0
  59. data/lib/{wukong/encoding.rb → away/escapement.rb} +0 -0
  60. data/lib/away/exe.rb +11 -0
  61. data/lib/away/experimental.rb +5 -0
  62. data/lib/away/from_file.rb +52 -0
  63. data/lib/away/job.rb +56 -0
  64. data/lib/away/job/rake_compat.rb +17 -0
  65. data/lib/away/registry.rb +79 -0
  66. data/lib/away/runner.rb +276 -0
  67. data/lib/away/runner/execute.rb +121 -0
  68. data/lib/away/script.rb +161 -0
  69. data/lib/away/script/hadoop_command.rb +240 -0
  70. data/lib/away/source/file_list_source.rb +15 -0
  71. data/lib/away/source/looper.rb +18 -0
  72. data/lib/away/task.rb +219 -0
  73. data/lib/hanuman.rb +9 -0
  74. data/lib/hanuman/action.rb +21 -0
  75. data/lib/hanuman/chain.rb +4 -0
  76. data/lib/hanuman/graph.rb +51 -0
  77. data/lib/hanuman/graphviz.rb +74 -0
  78. data/lib/hanuman/graphvizzer.rb +185 -0
  79. data/lib/hanuman/resource.rb +6 -0
  80. data/lib/hanuman/slot.rb +87 -0
  81. data/lib/hanuman/slottable.rb +220 -0
  82. data/lib/hanuman/stage.rb +51 -0
  83. data/lib/wukong.rb +31 -17
  84. data/lib/wukong/bad_record.rb +13 -16
  85. data/lib/wukong/dataflow.rb +103 -0
  86. data/lib/wukong/event.rb +44 -0
  87. data/lib/wukong/local_runner.rb +55 -0
  88. data/lib/wukong/mapred.rb +3 -0
  89. data/lib/wukong/model/faker.rb +136 -0
  90. data/lib/wukong/processor.rb +142 -0
  91. data/lib/wukong/settings.rb +0 -0
  92. data/lib/wukong/universe.rb +48 -0
  93. data/lib/wukong/version.rb +3 -0
  94. data/lib/wukong/widget/filter.rb +81 -0
  95. data/lib/wukong/widget/gibberish.rb +123 -0
  96. data/lib/wukong/widget/monitor.rb +26 -0
  97. data/lib/wukong/widget/reducer.rb +66 -0
  98. data/lib/wukong/widget/sink.rb +58 -0
  99. data/lib/wukong/widget/source.rb +120 -0
  100. data/lib/wukong/widget/stringifier.rb +50 -0
  101. data/lib/wukong/workflow.rb +22 -0
  102. data/lib/wukong/workflow/command.rb +42 -0
  103. data/old/config/emr-example.yaml +48 -0
  104. data/{examples → old/examples}/README.txt +0 -0
  105. data/{examples → old/examples}/contrib/jeans/README.markdown +0 -0
  106. data/{examples → old/examples}/contrib/jeans/data/normalized_sizes +0 -0
  107. data/{examples → old/examples}/contrib/jeans/data/orders.tsv +0 -0
  108. data/{examples → old/examples}/contrib/jeans/data/sizes +0 -0
  109. data/{examples → old/examples}/contrib/jeans/normalize.rb +0 -0
  110. data/{examples → old/examples}/contrib/jeans/sizes.rb +0 -0
  111. data/old/examples/corpus/bnc_word_freq.rb +44 -0
  112. data/{examples → old/examples}/corpus/bucket_counter.rb +0 -0
  113. data/{examples → old/examples}/corpus/dbpedia_abstract_to_sentences.rb +0 -0
  114. data/{examples → old/examples}/corpus/sentence_bigrams.rb +0 -0
  115. data/{examples → old/examples}/corpus/sentence_coocurrence.rb +0 -0
  116. data/old/examples/corpus/stopwords.rb +138 -0
  117. data/{examples → old/examples}/corpus/words_to_bigrams.rb +0 -0
  118. data/{examples → old/examples}/emr/README.textile +0 -0
  119. data/{examples → old/examples}/emr/dot_wukong_dir/credentials.json +0 -0
  120. data/{examples → old/examples}/emr/dot_wukong_dir/emr.yaml +0 -0
  121. data/{examples → old/examples}/emr/dot_wukong_dir/emr_bootstrap.sh +0 -0
  122. data/{examples → old/examples}/emr/elastic_mapreduce_example.rb +0 -0
  123. data/{examples → old/examples}/network_graph/adjacency_list.rb +0 -0
  124. data/{examples → old/examples}/network_graph/breadth_first_search.rb +0 -0
  125. data/{examples → old/examples}/network_graph/gen_2paths.rb +0 -0
  126. data/{examples → old/examples}/network_graph/gen_multi_edge.rb +0 -0
  127. data/{examples → old/examples}/network_graph/gen_symmetric_links.rb +0 -0
  128. data/{examples → old/examples}/pagerank/README.textile +0 -0
  129. data/{examples → old/examples}/pagerank/gen_initial_pagerank_graph.pig +0 -0
  130. data/{examples → old/examples}/pagerank/pagerank.rb +0 -0
  131. data/{examples → old/examples}/pagerank/pagerank_initialize.rb +0 -0
  132. data/{examples → old/examples}/pagerank/run_pagerank.sh +0 -0
  133. data/{examples → old/examples}/sample_records.rb +0 -0
  134. data/{examples → old/examples}/server_logs/apache_log_parser.rb +0 -4
  135. data/{examples → old/examples}/server_logs/breadcrumbs.rb +0 -0
  136. data/{examples → old/examples}/server_logs/nook.rb +0 -0
  137. data/{examples → old/examples}/server_logs/nook/faraday_dummy_adapter.rb +0 -0
  138. data/{examples → old/examples}/server_logs/user_agent.rb +0 -0
  139. data/{examples → old/examples}/simple_word_count.rb +0 -0
  140. data/{examples → old/examples}/size.rb +0 -0
  141. data/{examples → old/examples}/stats/avg_value_frequency.rb +0 -0
  142. data/{examples → old/examples}/stats/binning_percentile_estimator.rb +0 -0
  143. data/{examples → old/examples}/stats/data/avg_value_frequency.tsv +0 -0
  144. data/{examples → old/examples}/stats/rank_and_bin.rb +0 -0
  145. data/{examples → old/examples}/stupidly_simple_filter.rb +0 -0
  146. data/old/examples/word_count.rb +75 -0
  147. data/old/graph/graphviz_builder.rb +580 -0
  148. data/old/graph_easy/Attributes.pm +4181 -0
  149. data/old/graph_easy/Graphviz.pm +2232 -0
  150. data/old/wukong.rb +18 -0
  151. data/{lib → old}/wukong/and_pig.rb +0 -0
  152. data/old/wukong/bad_record.rb +18 -0
  153. data/{lib → old}/wukong/datatypes.rb +0 -0
  154. data/{lib → old}/wukong/datatypes/enum.rb +0 -0
  155. data/{lib → old}/wukong/datatypes/fake_types.rb +0 -0
  156. data/{lib → old}/wukong/decorator.rb +0 -0
  157. data/{lib → old}/wukong/encoding/asciize.rb +0 -0
  158. data/{lib → old}/wukong/extensions.rb +0 -0
  159. data/{lib → old}/wukong/extensions/array.rb +0 -0
  160. data/{lib → old}/wukong/extensions/blank.rb +0 -0
  161. data/{lib → old}/wukong/extensions/class.rb +0 -0
  162. data/{lib → old}/wukong/extensions/date_time.rb +0 -0
  163. data/{lib → old}/wukong/extensions/emittable.rb +0 -0
  164. data/{lib → old}/wukong/extensions/enumerable.rb +0 -0
  165. data/{lib → old}/wukong/extensions/hash.rb +0 -0
  166. data/{lib → old}/wukong/extensions/hash_keys.rb +0 -0
  167. data/{lib → old}/wukong/extensions/hash_like.rb +0 -0
  168. data/{lib → old}/wukong/extensions/hashlike_class.rb +0 -0
  169. data/{lib → old}/wukong/extensions/module.rb +0 -0
  170. data/{lib → old}/wukong/extensions/pathname.rb +0 -0
  171. data/{lib → old}/wukong/extensions/string.rb +0 -0
  172. data/{lib → old}/wukong/extensions/struct.rb +0 -0
  173. data/{lib → old}/wukong/extensions/symbol.rb +0 -0
  174. data/{lib → old}/wukong/filename_pattern.rb +0 -0
  175. data/old/wukong/helper.rb +7 -0
  176. data/old/wukong/helper/stopwords.rb +195 -0
  177. data/old/wukong/helper/tokenize.rb +35 -0
  178. data/{lib → old}/wukong/logger.rb +0 -0
  179. data/{lib → old}/wukong/periodic_monitor.rb +0 -0
  180. data/{lib → old}/wukong/schema.rb +0 -0
  181. data/{lib → old}/wukong/script.rb +0 -0
  182. data/{lib → old}/wukong/script/avro_command.rb +0 -0
  183. data/{lib → old}/wukong/script/cassandra_loader_script.rb +0 -0
  184. data/{lib → old}/wukong/script/emr_command.rb +0 -0
  185. data/{lib → old}/wukong/script/hadoop_command.rb +0 -0
  186. data/{lib → old}/wukong/script/local_command.rb +4 -1
  187. data/{lib → old}/wukong/store.rb +0 -0
  188. data/{lib → old}/wukong/store/base.rb +0 -0
  189. data/{lib → old}/wukong/store/cassandra.rb +0 -0
  190. data/{lib → old}/wukong/store/cassandra/streaming.rb +0 -0
  191. data/{lib → old}/wukong/store/cassandra/struct_loader.rb +0 -0
  192. data/{lib → old}/wukong/store/cassandra_model.rb +0 -0
  193. data/{lib → old}/wukong/store/chh_chunked_flat_file_store.rb +0 -0
  194. data/{lib → old}/wukong/store/chunked_flat_file_store.rb +0 -0
  195. data/{lib → old}/wukong/store/conditional_store.rb +0 -0
  196. data/{lib → old}/wukong/store/factory.rb +0 -0
  197. data/{lib → old}/wukong/store/flat_file_store.rb +0 -0
  198. data/{lib → old}/wukong/store/key_store.rb +0 -0
  199. data/{lib → old}/wukong/store/null_store.rb +0 -0
  200. data/{lib → old}/wukong/store/read_thru_store.rb +0 -0
  201. data/{lib → old}/wukong/store/tokyo_tdb_key_store.rb +0 -0
  202. data/{lib → old}/wukong/store/tyrant_rdb_key_store.rb +0 -0
  203. data/{lib → old}/wukong/store/tyrant_tdb_key_store.rb +0 -0
  204. data/{lib → old}/wukong/streamer.rb +8 -0
  205. data/{lib → old}/wukong/streamer/accumulating_reducer.rb +0 -0
  206. data/{lib → old}/wukong/streamer/base.rb +2 -1
  207. data/{lib → old}/wukong/streamer/counting_reducer.rb +0 -0
  208. data/{lib → old}/wukong/streamer/filter.rb +0 -0
  209. data/old/wukong/streamer/instance_streamer.rb +15 -0
  210. data/old/wukong/streamer/json_streamer.rb +21 -0
  211. data/{lib → old}/wukong/streamer/line_streamer.rb +0 -0
  212. data/{lib → old}/wukong/streamer/list_reducer.rb +0 -0
  213. data/{lib → old}/wukong/streamer/rank_and_bin_reducer.rb +0 -0
  214. data/{lib → old}/wukong/streamer/record_streamer.rb +0 -0
  215. data/{lib → old}/wukong/streamer/reducer.rb +0 -0
  216. data/{lib → old}/wukong/streamer/set_reducer.rb +0 -0
  217. data/{lib → old}/wukong/streamer/struct_streamer.rb +0 -0
  218. data/{lib → old}/wukong/streamer/summing_reducer.rb +0 -0
  219. data/{lib → old}/wukong/streamer/uniq_by_last_reducer.rb +0 -0
  220. data/{lib → old}/wukong/typed_struct.rb +0 -0
  221. data/spec/away/encoding_spec.rb +32 -0
  222. data/spec/away/exe_spec.rb +20 -0
  223. data/spec/away/flow_spec.rb +82 -0
  224. data/spec/away/graph_spec.rb +6 -0
  225. data/spec/away/job_spec.rb +15 -0
  226. data/spec/away/rake_compat_spec.rb +9 -0
  227. data/spec/away/script_spec.rb +81 -0
  228. data/spec/examples/dataflow/parse_apache_logs_spec.rb +8 -0
  229. data/spec/examples/dataflow/parsing_spec.rb +13 -0
  230. data/spec/examples/dataflow/simple_spec.rb +8 -0
  231. data/spec/examples/dataflow/telegram_spec.rb +43 -0
  232. data/spec/examples/graph/minimum_spanning_tree_spec.rb +35 -0
  233. data/spec/examples/text/pig_latin_spec.rb +21 -0
  234. data/spec/examples/workflow/cherry_pie_spec.rb +6 -0
  235. data/spec/hanuman/graph_spec.rb +17 -0
  236. data/spec/hanuman/graphviz_spec.rb +29 -0
  237. data/spec/hanuman/slot_spec.rb +2 -0
  238. data/spec/hanuman/stage_spec.rb +12 -0
  239. data/spec/spec_helper.rb +24 -6
  240. data/spec/support/examples_helper.rb +10 -0
  241. data/spec/support/hanuman_test_helpers.rb +90 -0
  242. data/spec/support/streamer_test_helpers.rb +6 -0
  243. data/spec/support/wukong_test_helpers.rb +43 -0
  244. data/spec/support/wukong_widget_helpers.rb +66 -0
  245. data/spec/wukong/dataflow_spec.rb +65 -0
  246. data/spec/wukong/local_runner_spec.rb +31 -0
  247. data/spec/wukong/model/faker_spec.rb +131 -0
  248. data/spec/wukong/processor_spec.rb +109 -0
  249. data/spec/wukong/runner_spec.rb +12 -0
  250. data/spec/wukong/widget/filter_spec.rb +99 -0
  251. data/spec/wukong/widget/sink_spec.rb +19 -0
  252. data/spec/wukong/widget/source_spec.rb +41 -0
  253. data/spec/wukong/widget/stringifier_spec.rb +51 -0
  254. data/spec/wukong/workflow/command_spec.rb +5 -0
  255. data/wukong.gemspec +36 -277
  256. metadata +421 -165
  257. data/CHANGELOG.textile +0 -106
  258. data/INSTALL.textile +0 -89
  259. data/README.textile +0 -274
  260. data/TODO.textile +0 -11
  261. data/examples/ignore_me/counting.rb +0 -55
  262. data/examples/ignore_me/grouper.rb +0 -71
  263. data/old/cassandra_streaming/berlitz_for_cassandra.textile +0 -22
  264. data/old/cassandra_streaming/client_interface_notes.textile +0 -200
  265. data/old/cassandra_streaming/client_schema.textile +0 -318
  266. data/old/cassandra_streaming/tuning.textile +0 -73
  267. data/spec/wukong/encoding_spec.rb +0 -36
  268. data/spec/wukong/script_spec.rb +0 -80
@@ -0,0 +1,121 @@
1
+ # -*- coding: utf-8 -*-
2
+
3
+ #
4
+ # Taken from the [Aruba project](https://github.com/cucumber/aruba). Original license:
5
+ #
6
+ # Copyright (c) 2010 Aslak Hellesøy, David Chelimsky
7
+ #
8
+ # Permission is hereby granted, free of charge, to any person obtaining
9
+ # a copy of this software and associated documentation files (the
10
+ # "Software"), to deal in the Software without restriction, including
11
+ # without limitation the rights to use, copy, modify, merge, publish,
12
+ # distribute, sublicense, and/or sell copies of the Software, and to
13
+ # permit persons to whom the Software is furnished to do so, subject to
14
+ # the following conditions:
15
+ #
16
+ # The above copyright notice and this permission notice shall be
17
+ # included in all copies or substantial portions of the Software.
18
+ #
19
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
20
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
22
+ # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
23
+ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
24
+ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
25
+ # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26
+
27
+
28
+ require 'childprocess'
29
+ require 'tempfile'
30
+
31
module Wukong
  class Runner

    #
    # Wraps a child process (built with the childprocess gem), capturing its
    # stdout/stderr into in-memory StringIO buffers and exposing helpers to
    # feed stdin, read (optionally ANSI-filtered) output, and shut it down.
    #
    class Process
      # true to preserve ANSI escape sequences in captured output;
      # false to strip them (see #filter_ansi)
      attr_reader :keep_ansi

      # @param cmd          [Array<String>] command + arguments to execute
      # @param exit_timeout [Float] seconds to wait in #stop for the child to exit
      # @param io_wait      [Float] seconds to pause before reading IO from a live child
      # @param keep_ansi    [true,false] keep ANSI color codes in output?
      def initialize(cmd, exit_timeout=2.0, io_wait=2.0, keep_ansi=true)
        @exit_timeout = exit_timeout
        @io_wait      = io_wait
        # FIX: honor the keep_ansi argument. It was hard-coded to `true`,
        # which made filter_ansi a no-op no matter what the caller asked for.
        @keep_ansi    = keep_ansi

        @process           = ChildProcess.build(*cmd)
        @process.io.stdout = raw_out_io
        @process.io.stderr = raw_err_io
        @process.duplex    = true
      end

      # In-memory buffer receiving the child's standard output.
      def raw_out_io
        @raw_out_io ||= StringIO.new('', 'w')
      end

      # In-memory buffer receiving the child's standard error.
      def raw_err_io
        @raw_err_io ||= StringIO.new('', 'w')
      end

      # Start the child process; yields self if a block is given.
      def run!(&block)
        @process.start
        yield self if block_given?
      end

      # The child's stdin stream, set to sync so writes flush immediately.
      def stdin
        wait_for_io do
          @process.io.stdin.sync = true
          @process.io.stdin
        end
      end

      # Combined stdout + stderr text captured so far.
      def output
        stdout + stderr
      end

      # Captured standard output, ANSI-filtered unless keep_ansi is set.
      def stdout
        wait_for_io do
          @raw_out_io.rewind
          filter_ansi(@raw_out_io.read)
        end
      end

      # Captured standard error, ANSI-filtered unless keep_ansi is set.
      def stderr
        wait_for_io do
          @raw_err_io.rewind
          filter_ansi(@raw_err_io.read)
        end
      end

      # Drain remaining output into the given reader (an object responding to
      # #stdout and #stderr), wait up to @exit_timeout for the child to exit,
      # and return its exit code.
      def stop(reader)
        return unless @process
        unless @process.exited?
          reader.stdout stdout
          reader.stderr stderr
          @process.poll_for_exit(@exit_timeout)
        end
        @process.exit_code
      end

      # Forcibly stop the child, flushing captured output before and after.
      def terminate
        if @process
          flush
          @process.stop
          flush
        end
      end

      # Force both output buffers to be read through the filtering accessors.
      def flush
        stdout && stderr # flush output
      end

      private

      # Give a still-running child a moment (@io_wait) to produce output
      # before reading, then yield.
      def wait_for_io(&block)
        sleep @io_wait if @process.alive?
        yield
      end

      # Strip ANSI color/style escape sequences unless keep_ansi is set.
      def filter_ansi(string)
        keep_ansi ? string : string.gsub(/\e\[\d+(?>(;\d+)*)m/, '')
      end

    end
  end
end
@@ -0,0 +1,161 @@
1
require 'wukong'
require 'wukong/script/hadoop_command'
require 'wukong/experimental'

#
# Runner settings
#

Settings.define :mode,        :type => Symbol, :default => :mapper, :env_var => 'WUKONG_MODE', :description => "run the script's workflow: Specify 'hadoop' to use hadoop streaming; 'local' to run your_script.rb --map | sort | your_script.rb --reduce; 'emr' to launch on the amazon cloud; 'mapper' or 'reducer' to run that phase.", :wukong => true
Settings.define :dry_run,     :description => "echo the command that will be run, but don't run it", :wukong => true
Settings.define :rm,          :description => "Recursively remove the destination directory. Only used in hadoop mode.", :wukong => true
Settings.define :script_file, :type => :filename, :description => "script file to execute, or give as first arg", :wukong => true

module Wukong
  # adds ability to execute a script file in the context of the Wukong module
  extend Wukong::Mixin::FromFile

  # Source +filename+ (appending ".rb" unless it already ends in it) into
  # the Wukong module.
  def self.from_file(filename)
    filename = filename.to_s
    filename += ".rb" if filename !~ /\.rb$/
    super(filename)
  end

  # Main entry point: source the given script file (or resolve settings if
  # none is given), then construct and run a Script. Guards so the script
  # body is only executed once; later calls return false.
  def self.run(filename=nil)
    if filename
      self.from_file(filename)
    else
      Settings.resolve!
    end
    if @main_run then return false ; end
    Wukong::Script.new(Settings).run
    @main_run = true
  end

  #
  # sources a script file and dispatches it to the right execution mode
  # (local pipeline, hadoop streaming, or a single mapper/reducer phase)
  #
  class Script
    attr_reader :settings    # configliere hash of settings
    attr_reader :script_file # File to execute
    attr_reader :input_paths # positional args before the last one
    attr_reader :output_path # last positional arg

    include Wukong::Script::HadoopCommand

    # @param settings [Settings] resolved configliere settings; the last
    #   positional argument is the output path, the rest are input paths.
    def initialize(settings)
      @settings = settings

      # NOTE: #pop mutates settings.rest, so input_paths is everything
      # except the popped output path.
      @output_path = settings.rest.pop
      @input_paths = settings.rest.reject(&:blank?)
    end

    # Execute the script file in the context of the Wukong module.
    def run_flow
      # FIX: populate @script_file from settings *before* logging it. The
      # original logged the never-assigned attr_reader (always nil) and then
      # assigned a dead local variable instead.
      @script_file = settings.script_file
      Log.debug( "Running #{script_file} with settings #{settings}")
      Wukong.flow(settings.mode).run
    end

    #
    # In --run mode, use the framework (local, hadoop, emr, etc) to re-launch
    # the script as mapper, reducer, etc.
    # If --map or --reduce, dispatch to the mapper or reducer.
    #
    def run
      case settings.mode
      when :local           then execute_local_workflow
      when :hadoop, :mapred then execute_hadoop_workflow
      else
        run_flow
      end
    end

    #
    # Shell command for map phase. By default, calls the script in --map mode
    # In hadoop mode, this is given to the hadoop streaming command.
    # In local mode, it's given to the system() call
    #
    def mapper_commandline
      "#{ruby_interpreter_path} #{this_script_filename} --mode=mapper " + non_wukong_params
    end

    #
    # Shell command for reduce phase. By default, calls the script in --reduce mode
    # In hadoop mode, this is given to the hadoop streaming command.
    # In local mode, it's given to the system() call
    #
    def reducer_commandline
      "#{ruby_interpreter_path} #{this_script_filename} --mode=reducer " + non_wukong_params
    end

    # Explicit :job_name setting if given, otherwise a name derived from the
    # script filename and the input/output paths (unsafe chars stripped).
    def job_name
      settings[:job_name] ||
        "#{File.basename(this_script_filename)}---#{input_paths}---#{output_path}".gsub(%r{[^\w/\.\-\+]+}, '')
    end

    # Wrapper for dangerous operations to catch errors
    def safely action, &block
      begin
        block.call
      rescue StandardError => e ; handle_error(action, e); end
    end

    protected

    #
    # Execute the runner phase:
    # use the running framework to relaunch the script in map and in reduce mode
    #
    def execute_command! *args
      command = args.flatten.reject(&:blank?).join(" \\\n    ")
      Log.info "Running\n\n#{command}\n"
      if settings[:dry_run]
        Log.info '== [Not running preceding command: dry run] =='
      else
        maybe_overwrite_output_paths! output_path
        $stdout.puts `#{command}`
        raise "Streaming command failed!" unless $?.success?
      end
    end

    #
    # In hadoop mode only, removes the destination path before launching
    #
    # To the panic-stricken: look in .Trash/current/path/to/accidentally_deleted_files
    #
    def maybe_overwrite_output_paths! output_path
      if (settings.rm && (settings.mode == :hadoop))
        cmd = %Q{#{hadoop_runner} fs -rmr '#{output_path}'}
        Log.info "Removing output file #{output_path}: #{cmd}"
        puts `#{cmd}`
      end
    end

    # Reassemble all the non-internal-to-wukong settings into a command line for
    # the map/reducer phase scripts
    def non_wukong_params
      settings.
        reject{|param, val| settings.definition_of(param, :wukong) }.
        reject{|param, val| param.to_s =~ /catalog_root/ }.
        map{|param,val| "--#{param}=#{val}" }.
        join(" ")
    end

    # the full, real path to the script file
    def this_script_filename
      Pathname.new($0).realpath
    end

    # use the full ruby interpreter path to run slave processes
    def ruby_interpreter_path
      # FIX: Config::CONFIG was removed in Ruby 2.0; RbConfig::CONFIG is the
      # supported replacement (available since Ruby 1.8.5).
      Pathname.new(File.join(
          RbConfig::CONFIG["bindir"],
          RbConfig::CONFIG["RUBY_INSTALL_NAME"]+RbConfig::CONFIG["EXEEXT"])).realpath
    end

  end
end
@@ -0,0 +1,240 @@
1
+ # -*- coding: utf-8 -*-
2
module Wukong
  class Script
    #
    # Assembles the `hadoop jar .../hadoop-*streaming*.jar ...` command line
    # from the friendlier wukong settings, and exposes (via ClassMethods) the
    # environment variables hadoop streaming hands to each task.
    #
    module HadoopCommand

      # ===========================================================================
      #
      # Hadoop Options
      #
      Settings.define :hadoop_home,            :default => '/usr/lib/hadoop', :description => "Path to hadoop installation; ENV['HADOOP_HOME'] by default. HADOOP_HOME/bin/hadoop is used to run hadoop.", :env_var => 'HADOOP_HOME', :wukong => true
      Settings.define :hadoop_runner,          :description => "Path to hadoop script. Usually set --hadoop_home instead of this.", :wukong => true

      #
      # Translate simplified args to their hairy hadoop equivalents
      #
      # NOTE: for each :jobconf setting, the hadoop configuration *key* is
      # carried in the :description field -- jobconf() below reads it back out.
      Settings.define :io_sort_mb,             :jobconf => true, :description => 'io.sort.mb',                           :wukong => true
      Settings.define :io_sort_record_percent, :jobconf => true, :description => 'io.sort.record.percent',               :wukong => true
      Settings.define :job_name,               :jobconf => true, :description => 'mapred.job.name',                      :wukong => true
      Settings.define :key_field_separator,    :jobconf => true, :description => 'map.output.key.field.separator',       :wukong => true
      Settings.define :map_speculative,        :jobconf => true, :description => 'mapred.map.tasks.speculative.execution', :wukong => true
      Settings.define :map_tasks,              :jobconf => true, :description => 'mapred.map.tasks',                     :wukong => true
      Settings.define :max_maps_per_cluster,   :jobconf => true, :description => 'mapred.max.maps.per.cluster',          :wukong => true
      Settings.define :max_maps_per_node,      :jobconf => true, :description => 'mapred.max.maps.per.node',             :wukong => true
      Settings.define :max_node_map_tasks,     :jobconf => true, :description => 'mapred.tasktracker.map.tasks.maximum', :wukong => true
      Settings.define :max_node_reduce_tasks,  :jobconf => true, :description => 'mapred.tasktracker.reduce.tasks.maximum', :wukong => true
      Settings.define :max_record_length,      :jobconf => true, :description => 'mapred.linerecordreader.maxlength',    :wukong => true # "Safeguards against corrupted data: lines longer than this (in bytes) are treated as bad records."
      Settings.define :max_reduces_per_cluster,:jobconf => true, :description => 'mapred.max.reduces.per.cluster',       :wukong => true
      Settings.define :max_reduces_per_node,   :jobconf => true, :description => 'mapred.max.reduces.per.node',          :wukong => true
      Settings.define :max_tracker_failures,   :jobconf => true, :description => 'mapred.max.tracker.failures',          :wukong => true
      Settings.define :max_map_attempts,       :jobconf => true, :description => 'mapred.map.max.attempts',              :wukong => true
      Settings.define :max_reduce_attempts,    :jobconf => true, :description => 'mapred.reduce.max.attempts',           :wukong => true
      Settings.define :min_split_size,         :jobconf => true, :description => 'mapred.min.split.size',                :wukong => true
      Settings.define :output_field_separator, :jobconf => true, :description => 'stream.map.output.field.separator',    :wukong => true
      Settings.define :partition_fields,       :jobconf => true, :description => 'num.key.fields.for.partition',         :wukong => true
      Settings.define :reduce_tasks,           :jobconf => true, :description => 'mapred.reduce.tasks',                  :wukong => true
      Settings.define :respect_exit_status,    :jobconf => true, :description => 'stream.non.zero.exit.is.failure',      :wukong => true
      Settings.define :reuse_jvms,             :jobconf => true, :description => 'mapred.job.reuse.jvm.num.tasks',       :wukong => true
      Settings.define :sort_fields,            :jobconf => true, :description => 'stream.num.map.output.key.fields',     :wukong => true
      Settings.define :timeout,                :jobconf => true, :description => 'mapred.task.timeout',                  :wukong => true
      Settings.define :noempty,                :description => "don't create zero-byte reduce files (hadoop mode only)", :wukong => true
      Settings.define :split_on_xml_tag,       :description => "Parse XML document by specifying the tag name: 'anything found between <tag> and </tag> will be treated as one record for map tasks'", :wukong => true

      # emit a -D hadoop option if the simplified command line arg is present
      # if not, the resulting nil will be elided later
      # (the hadoop conf key comes from the setting's :description -- see above)
      def jobconf option
        if settings[option]
          # "-jobconf %s=%s" % [settings.definition_of(option, :description), settings[option]]
          "-D %s=%s" % [settings.definition_of(option, :description), settings[option]]
        end
      end

      #
      # Assemble the hadoop command to execute
      # and launch the hadoop runner to execute the script across all tasktrackers
      #
      # FIXME: Should add some simple logic to ensure that commands are in the
      # right order or hadoop will complain. ie. -D settings MUST come before
      # others
      #
      def execute_hadoop_workflow
        # Input paths join by ','
        input_paths = @input_paths.join(',')
        #
        # Use Settings[:hadoop_home] to set the path your config install.
        hadoop_commandline = [
          hadoop_runner,
          "jar #{settings[:hadoop_home]}/contrib/streaming/hadoop-*streaming*.jar",
          hadoop_jobconf_settings,
          "-D mapred.job.name='#{job_name}'",
          hadoop_other_args,
          "-mapper '#{mapper_commandline}'",
          "-reducer '#{reducer_commandline}'",
          "-input '#{input_paths}'",
          "-output '#{output_path}'",
          "-file '#{this_script_filename}'",
          hadoop_recycle_env,
        ].flatten.compact.join(" \t\\\n  ")
        Log.info "  Launching hadoop!"
        execute_command!(hadoop_commandline)
      end

      # The list of "-D key=value" flags for every jobconf-style setting the
      # user supplied; nils (unset options) are stripped.
      def hadoop_jobconf_settings
        jobconf_settings = []
        # Fixup these settings
        settings[:reuse_jvms]          = '-1'    if (settings[:reuse_jvms] == true)
        settings[:respect_exit_status] = 'false' if (settings[:ignore_exit_status] == true)
        # If no reducer and no reduce_command, then skip the reduce phase
        # FIXME: auto-detect nil reducer
        # settings[:reduce_tasks] = 0 if (! reducer) && (! settings[:reduce_command]) && (! settings[:reduce_tasks])
        # Fields hadoop should use to distribute records to reducers
        unless settings[:partition_fields].blank?
          jobconf_settings += [
            jobconf(:partition_fields),
            jobconf(:output_field_separator),
          ]
        end
        jobconf_settings += [
          :io_sort_mb,              :io_sort_record_percent,
          :map_speculative,         :map_tasks,
          :max_maps_per_cluster,    :max_maps_per_node,
          :max_node_map_tasks,      :max_node_reduce_tasks,
          :max_reduces_per_cluster, :max_reduces_per_node,
          :max_record_length,       :min_split_size,
          :output_field_separator,  :key_field_separator,
          :partition_fields,        :sort_fields,
          :reduce_tasks,            :respect_exit_status,
          :reuse_jvms,              :timeout,
          :max_tracker_failures,    :max_map_attempts,
          :max_reduce_attempts
        ].map{|opt| jobconf(opt)}
        jobconf_settings.flatten.compact
      end

      # Extra arguments for the streaming jar beyond the -D settings.
      def hadoop_other_args
        extra_str_args = [ settings[:extra_args] ]
        if settings.split_on_xml_tag
          extra_str_args << %Q{-inputreader 'StreamXmlRecordReader,begin=<#{settings.split_on_xml_tag}>,end=</#{settings.split_on_xml_tag}>'}
        end
        # FIX: no leading space on these -- the args are joined with an
        # explicit separator in execute_hadoop_workflow, so a leading space
        # produced a doubled space in the assembled command line (and was
        # inconsistent with the -inputreader arg above).
        extra_str_args << '-lazyOutput' if settings[:noempty]  # don't create reduce file if no records
        extra_str_args << '-partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner' unless settings[:partition_fields].blank?
        extra_str_args
      end

      # Pass selected local environment variables (currently just RUBYLIB)
      # through to the streaming tasks via -cmdenv.
      def hadoop_recycle_env
        %w[RUBYLIB].map do |var|
          %Q{-cmdenv '#{var}=#{ENV[var]}'} if ENV[var]
        end.compact
      end

      # The path to the hadoop runner script
      def hadoop_runner
        settings[:hadoop_runner] || (settings[:hadoop_home]+'/bin/hadoop')
      end

      module ClassMethods
        #
        # Via @pskomoroch via @tlipcon,
        #
        #  "there is a little known Hadoop Streaming trick buried in this Python
        #   script. You will notice that the date is not actually in the raw log
        #   data itself, but is part of the filename. It turns out that Hadoop makes
        #   job parameters you would fetch in Java with something like
        #   job.get("mapred.input.file") available as environment variables for
        #   streaming jobs, with periods replaced with underscores:
        #
        #     filepath = os.environ["map_input_file"]
        #     filename = os.path.split(filepath)[-1]
        #
        #   Thanks to Todd Lipcon for directing me to that hack.
        #

        # HDFS pathname to the input file currently being processed.
        def input_file
          ENV['map_input_file']
        end

        # Directory of the input file
        def input_dir
          ENV['mapred_input_dir']
        end

        # Offset of this chunk within the input file
        def map_input_start_offset
          ENV['map_input_start']
        end

        # length of the mapper's input chunk
        def map_input_length
          ENV['map_input_length']
        end

        # Identifier of the current task attempt.
        def attempt_id
          ENV['mapred_task_id']
        end

        # Identifier of the current task (stable across attempts).
        def curr_task_id
          ENV['mapred_tip_id']
        end

        # URL-encoded command line hadoop used to launch this stream processor.
        def script_cmdline_urlenc
          ENV['stream_map_streamprocessor']
        end
      end

      # Standard ClassMethods-on-include trick
      def self.included base
        base.class_eval do
          extend ClassMethods
        end
      end
    end
  end
end
192
+
193
+ # -partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner \
194
+ # -D mapred.output.key.comparator.class=org.apache.hadoop.mapred.lib.KeyFieldBasedComparator \
195
+ # -D mapred.text.key.comparator.options=-k2,2nr\
196
+ # -D mapred.text.key.partitioner.options=-k1,2\
197
+ # -D mapred.text.key.partitioner.options=\"-k1,$partfields\"
198
+ # -D stream.num.map.output.key.fields=\"$sortfields\"
199
+ #
200
+ # -D stream.map.output.field.separator=\"'/t'\"
201
+ # -D map.output.key.field.separator=. \
202
+ # -D mapred.data.field.separator=. \
203
+ # -D map.output.key.value.fields.spec=6,5,1-3:0- \
204
+ # -D reduce.output.key.value.fields.spec=0-2:5- \
205
+
206
+ # "HADOOP_HOME" =>"/usr/lib/hadoop-0.20/bin/..",
207
+ # "HADOOP_IDENT_STRING" =>"hadoop",
208
+ # "HADOOP_LOGFILE" =>"hadoop-hadoop-tasktracker-ip-10-242-14-223.log",
209
+ # "HADOOP_LOG_DIR" =>"/usr/lib/hadoop-0.20/bin/../logs",
210
+ # "HOME" =>"/var/run/hadoop-0.20",
211
+ # "JAVA_HOME" =>"/usr/lib/jvm/java-6-sun",
212
+ # "LD_LIBRARY_PATH" =>"/usr/lib/jvm/java-6-sun-1.6.0.10/jre/lib/i386/client:/usr/lib/jvm/java-6-sun-1.6.0.10/jre/lib/i386:/usr/lib/jvm/java-6-sun-1.6.0.10/jre/../lib/i386:/mnt/hadoop/mapred/local/taskTracker/jobcache/job_200910221152_0023/attempt_200910221152_0023_m_000000_0/work:/usr/lib/jvm/java-6-sun-1.6.0.10/jre/lib/i386/client:/usr/lib/jvm/java-6-sun-1.6.0.10/jre/lib/i386:/usr/lib/jvm/java-6-sun-1.6.0.10/jre/../lib/i386",
213
+ # "PATH" =>"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games",
214
+ # "USER" =>"hadoop",
215
+ #
216
+ # "dfs_block_size" =>"134217728",
217
+ # "map_input_start" =>"0",
218
+ # "map_input_length" =>"125726898",
219
+ # "mapred_output_key_class" =>"org.apache.hadoop.io.Text",
220
+ # "mapred_output_value_class" =>"org.apache.hadoop.io.Text",
221
+ # "mapred_output_format_class" =>"org.apache.hadoop.mapred.TextOutputFormat",
222
+ # "mapred_output_compression_codec" =>"org.apache.hadoop.io.compress.DefaultCodec",
223
+ # "mapred_output_compression_type" =>"BLOCK",
224
+ # "mapred_task_partition" =>"0",
225
+ # "mapred_tasktracker_map_tasks_maximum" =>"4",
226
+ # "mapred_tasktracker_reduce_tasks_maximum" =>"2",
227
+ # "mapred_tip_id" =>"task_200910221152_0023_m_000000",
228
+ # "mapred_task_id" =>"attempt_200910221152_0023_m_000000_0",
229
+ # "mapred_job_tracker" =>"ec2-174-129-141-78.compute-1.amazonaws.com:8021",
230
+ #
231
+ # "mapred_input_dir" =>"hdfs://ec2-174-129-141-78.compute-1.amazonaws.com/user/flip/ripd/com.tw/com.twitter.search/20090809",
232
+ # "map_input_file" =>"hdfs://ec2-174-129-141-78.compute-1.amazonaws.com/user/flip/ripd/com.tw/com.twitter.search/20090809/com.twitter.search+20090809233441-56735-womper.tsv.bz2",
233
+ # "mapred_working_dir" =>"hdfs://ec2-174-129-141-78.compute-1.amazonaws.com/user/flip",
234
+ # "mapred_work_output_dir" =>"hdfs://ec2-174-129-141-78.compute-1.amazonaws.com/user/flip/tmp/twsearch-20090809/_temporary/_attempt_200910221152_0023_m_000000_0",
235
+ # "mapred_output_dir" =>"hdfs://ec2-174-129-141-78.compute-1.amazonaws.com/user/flip/tmp/twsearch-20090809",
236
+ # "mapred_temp_dir" =>"/mnt/tmp/hadoop-hadoop/mapred/temp",
237
+ # "PWD" =>"/mnt/hadoop/mapred/local/taskTracker/jobcache/job_200910221152_0023/attempt_200910221152_0023_m_000000_0/work",
238
+ # "TMPDIR" =>"/mnt/hadoop/mapred/local/taskTracker/jobcache/job_200910221152_0023/attempt_200910221152_0023_m_000000_0/work/tmp",
239
+ # "stream_map_streamprocessor" =>"%2Fusr%2Fbin%2Fruby1.8+%2Fmnt%2Fhome%2Fflip%2Fics%2Fwuclan%2Fexamples%2Ftwitter%2Fparse%2Fparse_twitter_search_requests.rb+--map+--rm",
240
+ # "user_name" =>"flip",