wukong 3.0.0.pre2 → 3.0.0.pre3

Files changed (146)
  1. data/Gemfile +13 -0
  2. data/README.md +182 -6
  3. data/bin/wu-local +13 -5
  4. data/bin/wu-server +1 -1
  5. data/examples/Gemfile +2 -1
  6. data/examples/basic/string_reverser.rb +23 -0
  7. data/examples/{tiny_count.rb → basic/tiny_count.rb} +0 -0
  8. data/examples/{word_count → basic/word_count}/accumulator.rb +0 -0
  9. data/examples/{word_count → basic/word_count}/tokenizer.rb +0 -0
  10. data/examples/{word_count → basic/word_count}/word_count.rb +0 -0
  11. data/examples/deploy_pack/Gemfile +7 -0
  12. data/examples/deploy_pack/README.md +6 -0
  13. data/examples/{text/latinize_text.rb → deploy_pack/a/b/c/.gitkeep} +0 -0
  14. data/examples/deploy_pack/app/processors/string_reverser.rb +5 -0
  15. data/examples/deploy_pack/config/environment.rb +1 -0
  16. data/examples/{dataflow → dsl/dataflow}/fibonacci_series.rb +0 -0
  17. data/examples/dsl/dataflow/scraper_macro_flow.rb +28 -0
  18. data/examples/{dataflow → dsl/dataflow}/simple.rb +0 -0
  19. data/examples/{dataflow → dsl/dataflow}/telegram.rb +0 -0
  20. data/examples/{workflow → dsl/workflow}/cherry_pie.dot +0 -0
  21. data/examples/{workflow → dsl/workflow}/cherry_pie.md +0 -0
  22. data/examples/{workflow → dsl/workflow}/cherry_pie.png +0 -0
  23. data/examples/{workflow → dsl/workflow}/cherry_pie.rb +0 -0
  24. data/examples/empty/.gitkeep +0 -0
  25. data/examples/graph/implied_geolocation/README.md +63 -0
  26. data/examples/graph/{minimum_spanning_tree.rb → minimum_spanning_tree/airfares_graphviz.rb} +0 -0
  27. data/examples/munging/airline_flights/indexable.rb +75 -0
  28. data/examples/munging/airline_flights/indexable_spec.rb +90 -0
  29. data/examples/munging/geo/geonames_models.rb +29 -0
  30. data/examples/munging/wikipedia/dbpedia/dbpedia_common.rb +1 -0
  31. data/examples/munging/wikipedia/dbpedia/extract_links-cruft.rb +66 -0
  32. data/examples/munging/wikipedia/dbpedia/extract_links.rb +213 -146
  33. data/examples/rake_helper.rb +12 -0
  34. data/examples/ruby_project/Gemfile +7 -0
  35. data/examples/ruby_project/README.md +6 -0
  36. data/examples/ruby_project/a/b/c/.gitkeep +0 -0
  37. data/examples/serverlogs/geo_ip_mapping/munge_geolite.rb +82 -0
  38. data/examples/serverlogs/models/logline.rb +102 -0
  39. data/examples/{dataflow/parse_apache_logs.rb → serverlogs/parser/apache_parser_widget.rb} +0 -0
  40. data/examples/serverlogs/visit_paths/common.rb +4 -0
  41. data/examples/serverlogs/visit_paths/page_counts.pig +48 -0
  42. data/examples/serverlogs/visit_paths/serverlogs-01-parse-script.rb +11 -0
  43. data/examples/serverlogs/visit_paths/serverlogs-02-histograms-full.rb +31 -0
  44. data/examples/serverlogs/visit_paths/serverlogs-02-histograms-mapper.rb +12 -0
  45. data/examples/serverlogs/visit_paths/serverlogs-03-breadcrumbs-full.rb +67 -0
  46. data/examples/serverlogs/visit_paths/serverlogs-04-page_page_edges-full.rb +38 -0
  47. data/examples/text/{pig_latin.rb → pig_latin/pig_latinizer.rb} +0 -0
  48. data/examples/{dataflow/pig_latinizer.rb → text/pig_latin/pig_latinizer_widget.rb} +0 -0
  49. data/lib/hanuman/graph.rb +6 -1
  50. data/lib/wu/geo.rb +4 -0
  51. data/lib/wu/geo/geo_grids.numbers +0 -0
  52. data/lib/wu/geo/geolocated.rb +331 -0
  53. data/lib/wu/geo/quadtile.rb +69 -0
  54. data/{examples → lib/wu}/graph/union_find.rb +0 -0
  55. data/lib/wu/model/reconcilable.rb +63 -0
  56. data/{examples/munging/wikipedia/utils/munging_utils.rb → lib/wu/munging.rb} +7 -4
  57. data/lib/wu/social/models/twitter.rb +31 -0
  58. data/{examples/models/wikipedia.rb → lib/wu/wikipedia/models.rb} +0 -0
  59. data/lib/wukong.rb +9 -4
  60. data/lib/wukong/boot.rb +10 -1
  61. data/lib/wukong/driver.rb +65 -71
  62. data/lib/wukong/logger.rb +93 -0
  63. data/lib/wukong/processor.rb +38 -29
  64. data/lib/wukong/runner.rb +144 -0
  65. data/lib/wukong/server.rb +119 -0
  66. data/lib/wukong/spec_helpers.rb +1 -0
  67. data/lib/wukong/spec_helpers/integration_driver.rb +22 -9
  68. data/lib/wukong/spec_helpers/integration_driver_matchers.rb +26 -4
  69. data/lib/wukong/spec_helpers/processor_helpers.rb +4 -10
  70. data/lib/wukong/spec_helpers/shared_examples.rb +12 -13
  71. data/lib/wukong/version.rb +1 -1
  72. data/lib/wukong/widget/processors.rb +13 -0
  73. data/lib/wukong/widget/serializers.rb +55 -65
  74. data/lib/wukong/widgets.rb +0 -2
  75. data/spec/hanuman/graph_spec.rb +14 -0
  76. data/spec/spec_helper.rb +4 -30
  77. data/spec/support/{wukong_test_helpers.rb → example_test_helpers.rb} +29 -2
  78. data/spec/support/integration_helper.rb +38 -0
  79. data/spec/support/model_test_helpers.rb +115 -0
  80. data/spec/wu/geo/geolocated_spec.rb +247 -0
  81. data/spec/wu/model/reconcilable_spec.rb +152 -0
  82. data/spec/wukong/widget/processors_spec.rb +0 -1
  83. data/spec/wukong/widget/serializers_spec.rb +88 -62
  84. data/spec/wukong/wu_local_spec.rb +125 -0
  85. data/wukong.gemspec +3 -16
  86. metadata +72 -266
  87. data/examples/dataflow/apache_log_line.rb +0 -100
  88. data/examples/jabberwocky.txt +0 -36
  89. data/examples/munging/Gemfile +0 -8
  90. data/examples/munging/airline_flights/airline.rb +0 -57
  91. data/examples/munging/airline_flights/airport.rb +0 -211
  92. data/examples/munging/airline_flights/flight.rb +0 -156
  93. data/examples/munging/airline_flights/models.rb +0 -4
  94. data/examples/munging/airline_flights/parse.rb +0 -26
  95. data/examples/munging/airline_flights/route.rb +0 -35
  96. data/examples/munging/airline_flights/timezone_fixup.rb +0 -62
  97. data/examples/munging/airports/40_wbans.txt +0 -40
  98. data/examples/munging/airports/filter_weather_reports.rb +0 -37
  99. data/examples/munging/airports/join.pig +0 -31
  100. data/examples/munging/airports/to_tsv.rb +0 -33
  101. data/examples/munging/airports/usa_wbans.pig +0 -19
  102. data/examples/munging/airports/usa_wbans.txt +0 -2157
  103. data/examples/munging/airports/wbans.pig +0 -19
  104. data/examples/munging/airports/wbans.txt +0 -2310
  105. data/examples/munging/rake_helper.rb +0 -62
  106. data/examples/munging/weather/.gitignore +0 -1
  107. data/examples/munging/weather/Gemfile +0 -4
  108. data/examples/munging/weather/Rakefile +0 -28
  109. data/examples/munging/weather/extract_ish.rb +0 -13
  110. data/examples/munging/weather/models/weather.rb +0 -119
  111. data/examples/munging/weather/utils/noaa_downloader.rb +0 -46
  112. data/examples/munging/wikipedia/README.md +0 -34
  113. data/examples/munging/wikipedia/Rakefile +0 -193
  114. data/examples/munging/wikipedia/n1_subuniverse/n1_nodes.pig +0 -18
  115. data/examples/munging/wikipedia/page_metadata/extract_page_metadata.rb +0 -21
  116. data/examples/munging/wikipedia/page_metadata/extract_page_metadata.rb.old +0 -27
  117. data/examples/munging/wikipedia/pagelinks/augment_pagelinks.pig +0 -29
  118. data/examples/munging/wikipedia/pagelinks/extract_pagelinks.rb +0 -14
  119. data/examples/munging/wikipedia/pagelinks/extract_pagelinks.rb.old +0 -25
  120. data/examples/munging/wikipedia/pagelinks/undirect_pagelinks.pig +0 -29
  121. data/examples/munging/wikipedia/pageviews/augment_pageviews.pig +0 -32
  122. data/examples/munging/wikipedia/pageviews/extract_pageviews.rb +0 -85
  123. data/examples/munging/wikipedia/pig_style_guide.md +0 -25
  124. data/examples/munging/wikipedia/redirects/redirects_page_metadata.pig +0 -19
  125. data/examples/munging/wikipedia/subuniverse/sub_articles.pig +0 -23
  126. data/examples/munging/wikipedia/subuniverse/sub_page_metadata.pig +0 -24
  127. data/examples/munging/wikipedia/subuniverse/sub_pagelinks_from.pig +0 -22
  128. data/examples/munging/wikipedia/subuniverse/sub_pagelinks_into.pig +0 -22
  129. data/examples/munging/wikipedia/subuniverse/sub_pagelinks_within.pig +0 -26
  130. data/examples/munging/wikipedia/subuniverse/sub_pageviews.pig +0 -29
  131. data/examples/munging/wikipedia/subuniverse/sub_undirected_pagelinks_within.pig +0 -24
  132. data/examples/munging/wikipedia/utils/get_namespaces.rb +0 -86
  133. data/examples/munging/wikipedia/utils/namespaces.json +0 -1
  134. data/examples/string_reverser.rb +0 -26
  135. data/examples/twitter/locations.rb +0 -29
  136. data/examples/twitter/models.rb +0 -24
  137. data/examples/twitter/pt1-fiddle.pig +0 -8
  138. data/examples/twitter/pt2-simple_parse.pig +0 -31
  139. data/examples/twitter/pt2-simple_parse.rb +0 -18
  140. data/examples/twitter/pt3-join_on_zips.pig +0 -39
  141. data/examples/twitter/pt4-strong_links.rb +0 -20
  142. data/examples/twitter/pt5-lnglat_and_strong_links.pig +0 -16
  143. data/examples/twitter/states.tsv +0 -50
  144. data/examples/workflow/package_gem.rb +0 -55
  145. data/lib/wukong/widget/sink.rb +0 -16
  146. data/lib/wukong/widget/source.rb +0 -14
data/examples/munging/rake_helper.rb
@@ -1,62 +0,0 @@
- require 'gorillib'
- require 'gorillib/data_munging'
- require 'configliere'
-
- S3_BUCKET      = 'bigdata.chimpy.us'
- S3_DATA_ROOT   = "s3n://#{S3_BUCKET}/data"
- HDFS_DATA_ROOT = '/data'
-
- Settings.define :orig_data_root,    default: HDFS_DATA_ROOT, description: "directory root for input data"
- Settings.define :scratch_data_root, default: HDFS_DATA_ROOT, description: "directory root for scratch data"
- Settings.define :results_data_root, default: HDFS_DATA_ROOT, description: "directory root for results data"
- Settings.define :mini, description: 'Run in mini mode - operate inside the mini version of the specified universe', type: :boolean, default: false
- Settings.define :universe, description: 'Universe to draw data from', finally: ->(c){ c.universe ||= (c.mini? ? "mini" : "full") }
- Settings.define :pig_path, default: '/usr/local/bin/pig'
- Settings.define :local, type: :boolean, default: false
-
- def Settings.mini?; !! Settings.mini ; end # BANG BANG BANG
- def Settings.wu_run_cmd; (local ? '--run=local' : '--run') ; end
-
- def dir_exists?(dir)
-   if Settings.local
-     return File.exists? dir
-   else
-     `hadoop fs -test -e #{dir}`
-     return $?.exitstatus == 0
-   end
- end
-
- def wukong(script, input, output, options={})
-   input  = Pathname.of(input)
-   output = Pathname.of(output)
-   if dir_exists? output
-     puts "#{output} exists. Assuming that this job has already run..."
-     return
-   end
-   opts = ['--rm']
-   options.each_pair do |k,v|
-     opts << "--#{k}=#{v}"
-   end
-   opts << input
-   opts << output
-   ruby(script, Settings.wu_run_cmd, *opts)
- end
-
- def wukong_xml(script, input, output, split_tag)
-   wukong(script, input, output, {split_on_xml_tag: split_tag})
- end
-
- def pig(script_name, options={})
-   cmd = Settings.pig_path
-   options.each_pair do |k,v|
-     v = Pathname.of(v) if v.is_a? Symbol
-     if k.to_s.include? '_out' and dir_exists? v
-       puts "#{v} already exists. Assuming that this job has already run..."
-       return
-     else
-       cmd += " -param #{k.upcase}=#{v}"
-     end
-   end
-   cmd += " #{script_name}"
-   sh cmd
- end
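A note on the probe above: `hadoop fs -test -e` exits with status 0 when the path exists, and `dir_exists?` reads that status back through `$?`. That is what lets every task built on these helpers skip itself once its output is already in place. A minimal standalone sketch of the same probe (the path here is hypothetical):

    # Probe HDFS the same way dir_exists? does: exit status 0 means "exists".
    `hadoop fs -test -e /data/results/wikipedia/full/pagelinks`
    puts $?.exitstatus.zero? ? 'output exists, skipping job' : 'safe to run'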
data/examples/munging/weather/.gitignore
@@ -1 +0,0 @@
- Gemfile.lock
data/examples/munging/weather/Gemfile
@@ -1,4 +0,0 @@
- source 'http://rubygems.org'
-
- gem 'gorillib', :path => '/Users/dlaw/dev/gorillib'
- gem 'wukong',   :path => '/Users/dlaw/dev/wukong_og'
data/examples/munging/weather/Rakefile
@@ -1,28 +0,0 @@
- require 'configliere'
- Settings.use :commandline
-
- require_relative '../rake_helper'
-
- Settings.resolve!
-
- Pathname.register_paths(
-   project:  'noaa_ish',
-   universe: 'full',
-
-   orig:    [Settings.orig_data_root,    'orig'],
-   scratch: [Settings.scratch_data_root, 'scratch'],
-   results: [Settings.results_data_root, 'results'],
-
-   # Origin
-   noaa_ish_orig: [:orig, 'www1.ncdc.noaa.gov', 'pub', 'data', 'noaa'],
-   noaa_ish_test: [:noaa_ish_orig, '010010-99999-2012'],
-   # Results
-   noaa_ish_results: [:results, :project, :universe],
- )
-
- namespace :extract do
-   desc 'Extract the NOAA ISH weather data from flat files'
-   task :ish do
-     wukong('extract_ish.rb', :noaa_ish_test, :noaa_ish_results)
-   end
- end
data/examples/munging/weather/extract_ish.rb
@@ -1,13 +0,0 @@
- #!/usr/bin/env ruby
- # encoding: UTF-8
-
- require 'wukong'
- require 'wukong/streamer/flatpack_streamer'
-
- module Weather
-   class Mapper < Wukong::Streamer::FlatPackStreamer
-     format "_4 i6 i5 s8 s4 sD6e3 D7e3 s5 i5 s5 s4 i3 ssD4e1ii5 ssbi6 sssD5e1 sD5e1 sD5e1 ss*"
-   end
- end
-
- Wukong::Script.new(Weather::Mapper, nil).run
data/examples/munging/weather/models/weather.rb
@@ -1,119 +0,0 @@
- require 'gorillib'
- require 'gorillib/model'
- require 'gorillib/model/serialization'
- require 'gorillib/model/positional_fields'
-
- class RawWeatherReport
-   include Gorillib::Model
-   include Gorillib::Model::PositionalFields
-
-   field :usaf_station_id, Integer
-
-   # wban id appears to have 99999 as a blank value even though
-   # it is not specified as such in the docs
-   field :wban_station_id, Integer
-
-   field :obs_date, String
-   field :obs_time, String
-
-   field :obs_data_source, String, blankish: ['9', '', nil]
-
-   field :wstn_latitude,  Float, blankish: [99.999,  '', nil]
-   field :wstn_longitude, Float, blankish: [999.999, '', nil]
-
-   field :report_type_code, String, blankish: ['99999', '', nil]
-
-   field :wstn_elevation, Integer, blankish: [9999, '', nil]
-
-   field :wstn_call_letters, String, blankish: ['99999', '', nil]
-
-   field :quality_control_process_name, String
-
-   field :wind_direction, Integer, blankish: [999, '', nil]
-   field :wind_direction_qual, String
-   field :wind_observation_type, String, blankish: ['9', '', nil]
-   field :wind_speed, Float, blankish: [999.9, '', nil]
-   field :wind_speed_qual, String
-
-   field :ceiling_height, Integer, blankish: [99999, '', nil]
-   field :ceiling_qual, String
-   field :ceiling_determination, String, blankish: ['9', '', nil]
-   field :cavok, :boolean
-
-   field :visibility, Integer, blankish: [999999, '', nil]
-   field :visibility_qual, String
-   field :visibility_variability_code, String, blankish: ['9', '', nil]
-   field :visibility_variability_code_qual, String
-
-   field :air_temp, Float, blankish: [999.9, '', nil]
-   field :air_temp_qual, String
-
-   field :dew_point, Float, blankish: [999.9, '', nil]
-   field :dew_point_qual, String
-
-   field :sea_level_pressure, Float, blankish: [9999.9, '', nil]
-   field :sea_level_pressure_qual, String
-
-   field :raw_extended_observations, String
- end
-
- class ReportMetadata
-   include Gorillib::Model
-   field :wind_direction_qual, String
-   field :wind_speed_qual, String
-   field :ceiling_qual, String
-   field :visibility_qual, String
-   field :visibility_variability_code_qual, String
-   field :air_temp_qual, String
-   field :dew_point_qual, String
-   field :sea_level_pressure_qual, String
- end
-
- class WeatherReport
-   include Gorillib::Model
-
-   field :wstn_id, String # "usaf-wban"
-
-   field :wstn_latitude,  Float
-   field :wstn_longitude, Float
-   field :wstn_elevation, Float
-
-   field :obs_date, String
-   field :obs_time, String
-
-   field :wind_direction, Integer
-   field :wind_observation_type, String
-   field :wind_speed, Float
-
-   field :ceiling_height, Integer
-   field :ceiling_determination, String
-   field :cavok, :boolean
-
-   field :visibility, Integer
-   field :visibility_variability_code, :boolean
-
-   field :air_temp, Float
-
-   field :dew_point, Float
-
-   field :sea_level_pressure, Float
-
-   field :metadata, ReportMetadata, default: ReportMetadata.new
-
-   def receive!(hsh={})
-     # prune the quality fields into the metadata model
-     hsh.keys.each do |key|
-       next if (key.to_s =~ /[^_]*_qual/).nil?
-       val = hsh.delete(key)
-       metadata.send("receive_#{key}", val)
-     end
-     # merge the usaf and wban ids into a single "usaf-wban" station id
-     if hsh.keys.include?(:usaf_station_id) and hsh.keys.include?(:wban_station_id)
-       id  = hsh.delete(:usaf_station_id).to_s
-       id += "-#{hsh.delete :wban_station_id}"
-       hsh[:wstn_id] = id
-     end
-     super(hsh)
-   end
- end
data/examples/munging/weather/utils/noaa_downloader.rb
@@ -1,46 +0,0 @@
- #!/usr/bin/env ruby
- # encoding: UTF-8
-
- require 'open-uri'
- require 'configliere'
-
- NOAA_URL = 'http://www1.ncdc.noaa.gov/pub/data/noaa'
- Settings.use :commandline
-
- Settings({
-   years:   [1901],
-   verbose: false,
-   out_dir: '/data/rawd/noaa/isd',
-   un_gzip: false,
- })
-
- Settings.define :years,   flag: 'y', description: "Years to download"
- Settings.define :verbose, flag: 'v', description: "Get chatty", type: :boolean
- Settings.define :un_gzip, flag: 'g', description: "Unzip the files as they are uploaded", type: :boolean
- Settings.define :out_dir, flag: 'o', description: "The directory in the hdfs to put the files"
-
- Settings.resolve!
-
- def get_files_for_year(year)
-   year_page = open("#{NOAA_URL}/#{year}")
-   files = []
-   year_page.each_line do |line|
-     next unless line =~ /<a href="[^.]*\.gz">/
-     match = /<a href="([^.]*\.gz)">/.match(line)
-     files << match[1] unless match.nil?
-   end
-   files
- end
-
- Settings[:years].each do |year|
-   puts "Uploading files for year #{year}..." if Settings[:verbose]
-   get_files_for_year(year).each do |file|
-     puts "  Uploading #{file}..." if Settings[:verbose]
-     path = "#{NOAA_URL}/#{year}/#{file}"
-     if Settings[:un_gzip]
-       `curl '#{path}' | zcat | hdp-put #{Settings[:out_dir]}/#{year}/#{file}`
-     else
-       `curl '#{path}' | hdp-put #{Settings[:out_dir]}/#{year}/#{file}`
-     end
-   end
- end
data/examples/munging/wikipedia/README.md
@@ -1,34 +0,0 @@
- ## Encodings
- All SQL dumps are theoretically encoded in UTF-8, but the Wikipedia dumps contain malformed characters. You might see an 'Invalid UTF-8 byte sequence' error when running a Wukong job because of this.
-
- To fix this, use `guard_encoding` in `MungingUtils` to filter out malformed characters before attempting to process them. `guard_encoding` replaces all invalid characters with '�'.
-
- If you need to ensure that all characters are valid UTF-8 when piping things around on the command line, pipe your stream through `char_filter.rb`.
-
- If you need an invalid UTF-8 character, pretty much any single byte above \x7F will do, e.g.:
-
-     > char = "\x80"
-     => "\x80"
-     > char.encoding.name
-     => "UTF-8"
-     > char.valid_encoding?
-     => false
-
- [James Gray's blog](http://blog.grayproductions.net/articles/understanding_m17n) is really valuable for further reading on this.
-
- ## Dates
- Date information should be formatted as follows:
-
-     +----------+--------+--------------------------+-------------+
-     | int      | int    | long or float            | int         |
-     +----------+--------+--------------------------+-------------+
-     | YYYYMMDD | HHMMSS | Seconds since Unix epoch | Day of week |
-     +----------+--------+--------------------------+-------------+
-
- Times should always be in the UTC time zone.
-
- Hours go from 0 to 23.
-
- Months go from 01 to 12.
-
- Day of week goes from 0 to 6 (Sunday to Saturday).
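The removed README leans on `guard_encoding` from the project's `MungingUtils`; that implementation is gone with this release, but as a rough sketch of the behavior it describes (an assumption, not the project's code), Ruby's standard library can do the same scrubbing:

    # Sketch of a guard_encoding-style filter, assuming the semantics the README
    # describes: pass valid lines through, replace malformed bytes with U+FFFD.
    def guard_encoding(line)
      line.valid_encoding? ? line : line.scrub('�')  # String#scrub needs Ruby >= 2.1
    end

    ARGF.each_line { |line| puts guard_encoding(line) }

Likewise, the date layout in the table reduces to a few strftime calls; `date_fields` below is a hypothetical helper, not part of the gem:

    # Emit the four date fields in the README's layout, always in UTC:
    # YYYYMMDD, HHMMSS, seconds since the Unix epoch, day of week (0 = Sunday).
    def date_fields(time)
      t = time.utc
      [t.strftime('%Y%m%d'), t.strftime('%H%M%S'), t.to_i, t.wday]
    end

    p date_fields(Time.utc(2012, 8, 2, 13, 5, 0))
    # => ["20120802", "130500", 1343912700, 4]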
data/examples/munging/wikipedia/Rakefile
@@ -1,193 +0,0 @@
- require 'configliere'
- Settings.use :commandline
-
- require_relative '../rake_helper'
-
- DUMPS = ['20110722','20110803','20110901','20111007','20111115',
-          '20111201','20120104','20120211','20120307','20120403',
-          '20120502','20120601','20120702','20120802']
-
- Settings.define :pageviews_date_range_slug_in,  description: 'The pageviews date range to read',  default: '2012/2012-08'
- Settings.define :pageviews_date_range_slug_out, description: 'The pageviews date range to write', default: '2012/2012-08'
- Settings.define :dump, description: 'The wikipedia dump to use', default: DUMPS[-1]
- Settings.define :n1_node_id, description: 'Node to construct the N1 subuniverse around', default: '13692155'
- Settings.define :n1_subuniverse, description: 'The output universe for N1 subuniverse generation', finally: ->(c){ c.n1_subuniverse ||= "n1_#{c.n1_node_id}" }
- Settings.define :num_reducers, type: Integer, default: nil
- Settings.resolve!
-
- unless DUMPS.include? Settings.dump
-   puts "Invalid dump specified. Must be one of [#{DUMPS.join(', ')}].\nExiting..."
-   exit
- end
-
- =begin
- Universe is the universe that data is drawn from.
- It is also the default universe the data is written into.
- There are tasks (namely subuniverse generation) that do not write out
- into the supplied universe. Be careful.
- =end
-
- Pathname.register_paths(
-   project:  'wikipedia',
-   universe: [Settings.universe],
-
-   orig:    [Settings.orig_data_root,    'ripd'],
-   scratch: [Settings.scratch_data_root, 'scratch'],
-   results: [Settings.results_data_root, 'results'],
-
-   # Origin
-   wiki_dumps:     [:orig, 'dumps.wikimedia.org'],
-   orig_enwiki:    [:wiki_dumps, 'enwiki'],
-   orig_pageviews: [:wiki_dumps, 'other', 'pagecounts-raw', Settings.pageviews_date_range_slug_in],
-   orig_articles:  [:orig_enwiki, Settings.dump, "enwiki-#{Settings.dump}-pages-articles.xml.gz"],
-   orig_pages:     [:orig_enwiki, Settings.dump, "enwiki-#{Settings.dump}-page.sql.gz"],
-   orig_pagelinks: [:orig_enwiki, Settings.dump, "enwiki-#{Settings.dump}-pagelinks.sql.gz"],
-
-   # Scratch
-   wiki_scratch:          [:scratch, :project, :universe],
-   page_metadata_scratch: [:wiki_scratch, 'page_metadata'],
-   articles_scratch:      [:wiki_scratch, 'articles'],
-   pageviews_scratch:     [:wiki_scratch, 'pageviews', Settings.pageviews_date_range_slug_out],
-   pagelinks_scratch:     [:wiki_scratch, 'pagelinks'],
-
-   # Results
-   wiki_results:                    [:results, :project, :universe],
-   page_metadata_results:           [:wiki_results, 'page_metadata'],
-   pageviews_results:               [:wiki_results, 'pageviews'],
-   articles_results:                [:wiki_results, 'articles'],
-   pagelinks_results:               [:wiki_results, 'pagelinks'],
-   undirected_pagelinks_results:    [:wiki_results, 'undirected_pagelinks'],
-   redirects_pagelinks_results:     [:wiki_results, 'redirects_pagelinks'],
-   redirects_page_metadata_results: [:wiki_results, 'redirects_page_metadata'],
-
-   # N1 Subuniverse
-   n1_results:               [:results, 'wikipedia', Settings.n1_subuniverse],
-   n1_nodes_results:         [:n1_results, 'nodes'],
-   n1_edges_results:         [:n1_results, 'edges'],
-   n1_page_metadata_results: [:n1_results, 'page_metadata'],
-   n1_articles_results:      [:n1_results, 'articles'],
-   n1_pageviews_results:     [:n1_results, 'pageviews'],
- )
-
- namespace :utils do
-   desc 'Fetch a list of all Wikipedia namespaces and their IDs'
-   task :get_namespaces do
-     if File.exists? 'utils/namespaces.json'
-       puts 'utils/namespaces.json exists... Assuming that namespaces have already been downloaded'
-       next
-     end
-     ruby('utils/get_namespaces.rb')
-   end
- end
-
- namespace :extract do
-   desc 'Extract the Wikipedia article corpus from bzipped XML files'
-   task :articles do
-     wukong_xml('articles/extract_articles.rb', :orig_articles, :articles_results)
-   end
-
-   desc 'Extract the Wikipedia pages table from gzipped SQL dumps'
-   task :page_metadata do
-     wukong('page_metadata/extract_page_metadata.rb', :orig_pages, :page_metadata_results)
-   end
-
-   desc 'Extract Wikipedia pageview data from gzipped server logs'
-   task :pageviews do
-     if Settings.num_reducers.nil?
-       wukong('pageviews/extract_pageviews.rb', :orig_pageviews, :pageviews_scratch)
-     else
-       wukong('pageviews/extract_pageviews.rb', :orig_pageviews, :pageviews_scratch, {reduce_tasks: Settings.num_reducers})
-     end
-   end
-
-   desc 'Extract Wikipedia pagelinks data from gzipped SQL dumps'
-   task :pagelinks do
-     wukong('pagelinks/extract_pagelinks.rb', :orig_pagelinks, :pagelinks_scratch)
-   end
- end
-
- namespace :augment do
-   desc 'Augment extracted Wikipedia pageview data with page ID and other metadata'
-   task :pageviews => ['extract:pageviews', 'extract:page_metadata'] do
-     pig('pageviews/augment_pageviews.pig', {
-       page_metadata:           :page_metadata_results,
-       extracted_pageviews:     :pageviews_scratch,
-       augmented_pageviews_out: :pageviews_results,
-     })
-   end
-
-   desc 'Augment Wikipedia pagelinks data with page metadata'
-   task :pagelinks => ['extract:pagelinks', 'extract:page_metadata'] do
-     pig('pagelinks/augment_pagelinks.pig', {
-       page_metadata:           :page_metadata_results,
-       extracted_pagelinks:     :pagelinks_scratch,
-       augmented_pagelinks_out: :pagelinks_results,
-     })
-   end
-
-   desc 'Undirect the Wikipedia pagelinks graph'
-   task :pagelinks_undirect => 'augment:pagelinks' do
-     pig('pagelinks/undirect_pagelinks.pig', {
-       augmented_pagelinks:      :pagelinks_results,
-       undirected_pagelinks_out: :undirected_pagelinks_results,
-     })
-   end
- end
-
- namespace :n1 do
-   desc 'Generate a list of node ids for the N1 neighborhood of the specified node'
-   task :nodes => 'augment:pagelinks_undirect' do
-     pig('n1_subuniverse/n1_nodes.pig', {
-       undirected_pagelinks: :undirected_pagelinks_results,
-       hub:                  Settings.n1_node_id,
-       n1_nodes_out:         :n1_nodes_results,
-     })
-   end
-
-   desc 'Extract pagelinks for the N1 neighborhood of the specified node'
-   task :undirected_pagelinks => ['augment:pagelinks_undirect', :nodes] do
-     pig('subuniverse/sub_undirected_pagelinks_within.pig', {
-       undirected_pagelinks: :undirected_pagelinks_results,
-       sub_nodes:            :n1_nodes_results,
-       sub_pagelinks_out:    :n1_edges_results,
-     })
-   end
-
-   desc 'Extract page metadata for the N1 neighborhood of the specified node'
-   task :page_metadata => ['extract:page_metadata', :nodes] do
-     pig('subuniverse/sub_page_metadata.pig', {
-       page_metadata:         :page_metadata_results,
-       sub_nodes:             :n1_nodes_results,
-       sub_page_metadata_out: :n1_page_metadata_results,
-     })
-   end
-
-   desc 'Extract articles for the N1 neighborhood of the specified node'
-   task :articles => ['extract:articles', :nodes] do
-     pig('subuniverse/sub_articles.pig', {
-       articles:         :articles_results,
-       sub_nodes:        :n1_nodes_results,
-       sub_articles_out: :n1_articles_results,
-     })
-   end
-
-   desc 'Extract pageview data for the N1 neighborhood of the specified node'
-   task :pageviews => ['augment:pageviews', :nodes] do
-     pig('subuniverse/sub_pageviews.pig', {
-       pageviews:         :pageviews_results,
-       sub_nodes:         :n1_nodes_results,
-       sub_pageviews_out: :n1_pageviews_results,
-     })
-   end
- end
-
- namespace :redirects do
-   desc 'Extract redirects from the page metadata table'
-   task :redirects_page_metadata => 'extract:page_metadata' do
-     pig('redirects/redirects_page_metadata.pig', {
-       page_metadata: :page_metadata_results,
-       redirects_out: :redirects_page_metadata_results,
-     })
-   end
-
-   desc 'Extract redirect links from the pagelinks table'
-   task :redirect_pagelinks => ['redirects_page_metadata', 'augment:pagelinks'] do
-     pig('subuniverse/sub_pagelinks_from.pig', {
-       pagelinks:         :pagelinks_results,
-       sub_nodes:         :redirects_page_metadata_results,
-       sub_pagelinks_out: :redirects_pagelinks_results,
-     })
-   end
- end
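For orientation, here is roughly what one of these task bodies reduces to once the removed rake_helper resolves its path symbols. The concrete paths below are hypothetical, `--run` becomes `--run=local` under `--local`, and the real helper probes HDFS via `dir_exists?` rather than `File.exist?`:

    # A minimal sketch of what wukong('pagelinks/extract_pagelinks.rb',
    # :orig_pagelinks, :pagelinks_scratch) boils down to after symbol resolution.
    script = 'pagelinks/extract_pagelinks.rb'
    input  = '/data/ripd/dumps.wikimedia.org/enwiki/20120802/enwiki-20120802-pagelinks.sql.gz'
    output = '/data/scratch/wikipedia/full/pagelinks'
    system('ruby', script, '--run', '--rm', input, output) unless File.exist?(output)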