ruby-spark 1.1.0.1-java

Files changed (180)
  1. checksums.yaml +7 -0
  2. data/.gitignore +37 -0
  3. data/Gemfile +47 -0
  4. data/Guardfile +5 -0
  5. data/LICENSE.txt +22 -0
  6. data/README.md +252 -0
  7. data/Rakefile +35 -0
  8. data/TODO.md +6 -0
  9. data/benchmark/aggregate.rb +33 -0
  10. data/benchmark/bisect.rb +88 -0
  11. data/benchmark/comparison/prepare.sh +18 -0
  12. data/benchmark/comparison/python.py +156 -0
  13. data/benchmark/comparison/r.r +69 -0
  14. data/benchmark/comparison/ruby.rb +167 -0
  15. data/benchmark/comparison/run-all.sh +160 -0
  16. data/benchmark/comparison/scala.scala +181 -0
  17. data/benchmark/custom_marshal.rb +94 -0
  18. data/benchmark/digest.rb +150 -0
  19. data/benchmark/enumerator.rb +88 -0
  20. data/benchmark/serializer.rb +82 -0
  21. data/benchmark/sort.rb +43 -0
  22. data/benchmark/sort2.rb +164 -0
  23. data/benchmark/take.rb +28 -0
  24. data/bin/ruby-spark +8 -0
  25. data/example/pi.rb +28 -0
  26. data/example/website_search.rb +83 -0
  27. data/ext/ruby_c/extconf.rb +3 -0
  28. data/ext/ruby_c/murmur.c +158 -0
  29. data/ext/ruby_c/murmur.h +9 -0
  30. data/ext/ruby_c/ruby-spark.c +18 -0
  31. data/ext/ruby_java/Digest.java +36 -0
  32. data/ext/ruby_java/Murmur2.java +98 -0
  33. data/ext/ruby_java/RubySparkExtService.java +28 -0
  34. data/ext/ruby_java/extconf.rb +3 -0
  35. data/ext/spark/build.sbt +73 -0
  36. data/ext/spark/project/plugins.sbt +9 -0
  37. data/ext/spark/sbt/sbt +34 -0
  38. data/ext/spark/src/main/scala/Exec.scala +91 -0
  39. data/ext/spark/src/main/scala/MLLibAPI.scala +4 -0
  40. data/ext/spark/src/main/scala/Marshal.scala +52 -0
  41. data/ext/spark/src/main/scala/MarshalDump.scala +113 -0
  42. data/ext/spark/src/main/scala/MarshalLoad.scala +220 -0
  43. data/ext/spark/src/main/scala/RubyAccumulatorParam.scala +69 -0
  44. data/ext/spark/src/main/scala/RubyBroadcast.scala +13 -0
  45. data/ext/spark/src/main/scala/RubyConstant.scala +13 -0
  46. data/ext/spark/src/main/scala/RubyMLLibAPI.scala +55 -0
  47. data/ext/spark/src/main/scala/RubyMLLibUtilAPI.scala +21 -0
  48. data/ext/spark/src/main/scala/RubyPage.scala +34 -0
  49. data/ext/spark/src/main/scala/RubyRDD.scala +392 -0
  50. data/ext/spark/src/main/scala/RubySerializer.scala +14 -0
  51. data/ext/spark/src/main/scala/RubyTab.scala +11 -0
  52. data/ext/spark/src/main/scala/RubyUtils.scala +15 -0
  53. data/ext/spark/src/main/scala/RubyWorker.scala +257 -0
  54. data/ext/spark/src/test/scala/MarshalSpec.scala +84 -0
  55. data/lib/ruby-spark.rb +1 -0
  56. data/lib/spark.rb +198 -0
  57. data/lib/spark/accumulator.rb +260 -0
  58. data/lib/spark/broadcast.rb +98 -0
  59. data/lib/spark/build.rb +43 -0
  60. data/lib/spark/cli.rb +169 -0
  61. data/lib/spark/command.rb +86 -0
  62. data/lib/spark/command/base.rb +158 -0
  63. data/lib/spark/command/basic.rb +345 -0
  64. data/lib/spark/command/pair.rb +124 -0
  65. data/lib/spark/command/sort.rb +51 -0
  66. data/lib/spark/command/statistic.rb +144 -0
  67. data/lib/spark/command_builder.rb +141 -0
  68. data/lib/spark/command_validator.rb +34 -0
  69. data/lib/spark/config.rb +238 -0
  70. data/lib/spark/constant.rb +14 -0
  71. data/lib/spark/context.rb +322 -0
  72. data/lib/spark/error.rb +50 -0
  73. data/lib/spark/ext/hash.rb +41 -0
  74. data/lib/spark/ext/integer.rb +25 -0
  75. data/lib/spark/ext/io.rb +67 -0
  76. data/lib/spark/ext/ip_socket.rb +29 -0
  77. data/lib/spark/ext/module.rb +58 -0
  78. data/lib/spark/ext/object.rb +24 -0
  79. data/lib/spark/ext/string.rb +24 -0
  80. data/lib/spark/helper.rb +10 -0
  81. data/lib/spark/helper/logger.rb +40 -0
  82. data/lib/spark/helper/parser.rb +85 -0
  83. data/lib/spark/helper/serialize.rb +71 -0
  84. data/lib/spark/helper/statistic.rb +93 -0
  85. data/lib/spark/helper/system.rb +42 -0
  86. data/lib/spark/java_bridge.rb +19 -0
  87. data/lib/spark/java_bridge/base.rb +203 -0
  88. data/lib/spark/java_bridge/jruby.rb +23 -0
  89. data/lib/spark/java_bridge/rjb.rb +41 -0
  90. data/lib/spark/logger.rb +76 -0
  91. data/lib/spark/mllib.rb +100 -0
  92. data/lib/spark/mllib/classification/common.rb +31 -0
  93. data/lib/spark/mllib/classification/logistic_regression.rb +223 -0
  94. data/lib/spark/mllib/classification/naive_bayes.rb +97 -0
  95. data/lib/spark/mllib/classification/svm.rb +135 -0
  96. data/lib/spark/mllib/clustering/gaussian_mixture.rb +82 -0
  97. data/lib/spark/mllib/clustering/kmeans.rb +118 -0
  98. data/lib/spark/mllib/matrix.rb +120 -0
  99. data/lib/spark/mllib/regression/common.rb +73 -0
  100. data/lib/spark/mllib/regression/labeled_point.rb +41 -0
  101. data/lib/spark/mllib/regression/lasso.rb +100 -0
  102. data/lib/spark/mllib/regression/linear.rb +124 -0
  103. data/lib/spark/mllib/regression/ridge.rb +97 -0
  104. data/lib/spark/mllib/ruby_matrix/matrix_adapter.rb +53 -0
  105. data/lib/spark/mllib/ruby_matrix/vector_adapter.rb +57 -0
  106. data/lib/spark/mllib/stat/distribution.rb +12 -0
  107. data/lib/spark/mllib/vector.rb +185 -0
  108. data/lib/spark/rdd.rb +1377 -0
  109. data/lib/spark/sampler.rb +92 -0
  110. data/lib/spark/serializer.rb +79 -0
  111. data/lib/spark/serializer/auto_batched.rb +59 -0
  112. data/lib/spark/serializer/base.rb +63 -0
  113. data/lib/spark/serializer/batched.rb +84 -0
  114. data/lib/spark/serializer/cartesian.rb +13 -0
  115. data/lib/spark/serializer/compressed.rb +27 -0
  116. data/lib/spark/serializer/marshal.rb +17 -0
  117. data/lib/spark/serializer/message_pack.rb +23 -0
  118. data/lib/spark/serializer/oj.rb +23 -0
  119. data/lib/spark/serializer/pair.rb +41 -0
  120. data/lib/spark/serializer/text.rb +25 -0
  121. data/lib/spark/sort.rb +189 -0
  122. data/lib/spark/stat_counter.rb +125 -0
  123. data/lib/spark/storage_level.rb +39 -0
  124. data/lib/spark/version.rb +3 -0
  125. data/lib/spark/worker/master.rb +144 -0
  126. data/lib/spark/worker/spark_files.rb +15 -0
  127. data/lib/spark/worker/worker.rb +200 -0
  128. data/ruby-spark.gemspec +47 -0
  129. data/spec/generator.rb +37 -0
  130. data/spec/inputs/lorem_300.txt +316 -0
  131. data/spec/inputs/numbers/1.txt +50 -0
  132. data/spec/inputs/numbers/10.txt +50 -0
  133. data/spec/inputs/numbers/11.txt +50 -0
  134. data/spec/inputs/numbers/12.txt +50 -0
  135. data/spec/inputs/numbers/13.txt +50 -0
  136. data/spec/inputs/numbers/14.txt +50 -0
  137. data/spec/inputs/numbers/15.txt +50 -0
  138. data/spec/inputs/numbers/16.txt +50 -0
  139. data/spec/inputs/numbers/17.txt +50 -0
  140. data/spec/inputs/numbers/18.txt +50 -0
  141. data/spec/inputs/numbers/19.txt +50 -0
  142. data/spec/inputs/numbers/2.txt +50 -0
  143. data/spec/inputs/numbers/20.txt +50 -0
  144. data/spec/inputs/numbers/3.txt +50 -0
  145. data/spec/inputs/numbers/4.txt +50 -0
  146. data/spec/inputs/numbers/5.txt +50 -0
  147. data/spec/inputs/numbers/6.txt +50 -0
  148. data/spec/inputs/numbers/7.txt +50 -0
  149. data/spec/inputs/numbers/8.txt +50 -0
  150. data/spec/inputs/numbers/9.txt +50 -0
  151. data/spec/inputs/numbers_0_100.txt +101 -0
  152. data/spec/inputs/numbers_1_100.txt +100 -0
  153. data/spec/lib/collect_spec.rb +42 -0
  154. data/spec/lib/command_spec.rb +68 -0
  155. data/spec/lib/config_spec.rb +64 -0
  156. data/spec/lib/context_spec.rb +165 -0
  157. data/spec/lib/ext_spec.rb +72 -0
  158. data/spec/lib/external_apps_spec.rb +45 -0
  159. data/spec/lib/filter_spec.rb +80 -0
  160. data/spec/lib/flat_map_spec.rb +100 -0
  161. data/spec/lib/group_spec.rb +109 -0
  162. data/spec/lib/helper_spec.rb +19 -0
  163. data/spec/lib/key_spec.rb +41 -0
  164. data/spec/lib/manipulation_spec.rb +122 -0
  165. data/spec/lib/map_partitions_spec.rb +87 -0
  166. data/spec/lib/map_spec.rb +91 -0
  167. data/spec/lib/mllib/classification_spec.rb +54 -0
  168. data/spec/lib/mllib/clustering_spec.rb +35 -0
  169. data/spec/lib/mllib/matrix_spec.rb +32 -0
  170. data/spec/lib/mllib/regression_spec.rb +116 -0
  171. data/spec/lib/mllib/vector_spec.rb +77 -0
  172. data/spec/lib/reduce_by_key_spec.rb +118 -0
  173. data/spec/lib/reduce_spec.rb +131 -0
  174. data/spec/lib/sample_spec.rb +46 -0
  175. data/spec/lib/serializer_spec.rb +88 -0
  176. data/spec/lib/sort_spec.rb +58 -0
  177. data/spec/lib/statistic_spec.rb +170 -0
  178. data/spec/lib/whole_text_files_spec.rb +33 -0
  179. data/spec/spec_helper.rb +38 -0
  180. metadata +389 -0
data/spec/lib/external_apps_spec.rb
@@ -0,0 +1,45 @@
+ require 'spec_helper'
+
+ RSpec::describe Spark::RDD do
+
+   context '.pipe' do
+     let(:words) { Generator.words }
+     let(:numbers) { Generator.numbers }
+
+     it 'single program' do
+       skip if windows?
+
+       rdd = $sc.parallelize(words, 1)
+       rdd = rdd.pipe('tr a b')
+
+       result = words.dup
+       result.map! do |x|
+         x.gsub('a', 'b')
+       end
+
+       expect(rdd.collect).to eql(result)
+     end
+
+     it 'multiple programs' do
+       skip if windows?
+
+       rdd = $sc.parallelize(numbers, 1)
+       rdd = rdd.pipe("tr 1 5", "awk '{print $1*10}'")
+       rdd = rdd.map(lambda{|x| x.to_i * 100})
+
+       result = numbers.dup
+       result.map! do |x|
+         x.to_s.gsub('1', '5')
+       end
+       result.map! do |x|
+         x.to_i * 10
+       end
+       result.map! do |x|
+         x * 100
+       end
+
+       expect(rdd.collect).to eql(result)
+     end
+   end
+
+ end
data/spec/lib/filter_spec.rb
@@ -0,0 +1,80 @@
+ require "spec_helper"
+
+ def func4(item)
+   item.start_with?("a") && item.size > 3 && item[1].to_s.ord > 106
+ end
+
+ RSpec::shared_examples "a filtering" do |workers|
+   context "with #{workers || 'default'} worker" do
+     it "when numbers" do
+       rdd2 = rdd_numbers(workers)
+       rdd2 = rdd2.filter(func1)
+       result = numbers.select(&func1)
+
+       expect(rdd2.collect).to eql(result)
+
+       rdd3 = rdd_numbers(workers)
+       rdd3 = rdd3.filter(func1)
+       rdd3 = rdd3.filter(func2)
+
+       expect(rdd3.collect).to eql([])
+     end
+
+     it "when words" do
+       rdd2 = rdd_words(workers)
+       rdd2 = rdd2.filter(func3)
+       result = words.select{|x| func3.call(x)}
+
+       expect(rdd2.collect).to eql(result)
+
+       rdd3 = rdd_words(workers)
+       rdd3 = rdd3.filter(method(:func4))
+       result = words.select{|x| func4(x)}
+
+       expect(rdd3.collect).to eql(result)
+     end
+   end
+ end
+
+ RSpec::describe "Spark::RDD.filter" do
+   let(:func1) { lambda{|x| x.to_i.even?} }
+   let(:func2) { lambda{|x| x.to_i.odd?} }
+   let(:func3) { lambda{|x| x.to_s.start_with?("b")} }
+
+   context "through parallelize" do
+     let(:numbers) { Generator.numbers_with_zero }
+     let(:words) { Generator.words }
+
+     def rdd_numbers(workers)
+       $sc.parallelize(numbers, workers)
+     end
+
+     def rdd_words(workers)
+       $sc.parallelize(words, workers)
+     end
+
+     it_behaves_like "a filtering", nil
+     it_behaves_like "a filtering", 1
+     it_behaves_like "a filtering", rand(2..10)
+   end
+
+   context "through text_file" do
+     let(:file_numbers) { File.join("spec", "inputs", "numbers_0_100.txt") }
+     let(:file_words) { File.join("spec", "inputs", "lorem_300.txt") }
+
+     let(:numbers) { File.readlines(file_numbers).map(&:strip) }
+     let(:words) { File.readlines(file_words).map(&:strip) }
+
+     def rdd_numbers(workers)
+       $sc.text_file(file_numbers, workers)
+     end
+
+     def rdd_words(workers)
+       $sc.text_file(file_words, workers)
+     end
+
+     it_behaves_like "a filtering", nil
+     it_behaves_like "a filtering", 1
+     it_behaves_like "a filtering", rand(2..10)
+   end
+ end
data/spec/lib/flat_map_spec.rb
@@ -0,0 +1,100 @@
+ require "spec_helper"
+
+ RSpec::shared_examples "a flat mapping" do |workers|
+   it "with #{workers || 'default'} worker" do
+     rdd2 = rdd(workers).map(func1)
+     result = numbers.flat_map(&func1)
+
+     expect(rdd2.collect).to eql(result)
+
+     rdd3 = rdd(workers)
+     rdd3 = rdd3.flat_map(func1)
+     rdd3 = rdd3.flat_map(func2)
+     rdd3 = rdd3.flat_map(func3)
+     result = numbers.flat_map(&func1).flat_map(&func2).flat_map(&func3)
+
+     expect(rdd3.collect).to eql(result)
+
+     rdd4 = rdd(workers)
+     rdd4 = rdd4.flat_map(func1)
+     rdd4 = rdd4.flat_map(func2)
+     rdd4 = rdd4.flat_map(func3)
+
+     expect(rdd4.collect).to eql(rdd3.collect)
+   end
+ end
+
+ RSpec::shared_examples "a flat mapping values" do |workers|
+   it "with #{workers || 'default'} worker" do
+     rdd2 = rdd(workers).flat_map_values(func1)
+     result = []
+     hash_with_values.each do |(key, values)|
+       values = func1.call(values).flatten
+       values.each do |value|
+         result << [key, value]
+       end
+     end
+
+     expect(rdd2.collect).to eql(result)
+
+     rdd2 = rdd(workers).flat_map_values(func2)
+     result = []
+     hash_with_values.each do |(key, values)|
+       values = func2.call(values).flatten
+       values.each do |value|
+         result << [key, value]
+       end
+     end
+
+     expect(rdd2.collect).to eql(result)
+   end
+ end
+
+ RSpec::describe "Spark::RDD" do
+   let(:func1) { lambda{|x| x*2} }
+   let(:func2) { lambda{|x| [x*3, 1, 1]} }
+   let(:func3) { lambda{|x| [x*4, 2, 2]} }
+
+   context "through parallelize" do
+     context ".flat_map" do
+       let(:numbers) { Generator.numbers_with_zero }
+
+       def rdd(workers)
+         $sc.parallelize(numbers, workers)
+       end
+
+       it_behaves_like "a flat mapping", nil
+       it_behaves_like "a flat mapping", 1
+       it_behaves_like "a flat mapping", rand(2..10)
+     end
+
+     context ".flat_map_values" do
+       let(:func1) { lambda{|x| x*2} }
+       let(:func2) { lambda{|x| [x.first]} }
+       let(:hash_with_values) { Generator.hash_with_values }
+
+       def rdd(workers)
+         $sc.parallelize(hash_with_values, workers)
+       end
+
+       it_behaves_like "a flat mapping values", nil
+       it_behaves_like "a flat mapping values", 1
+       it_behaves_like "a flat mapping values", rand(2..10)
+     end
+   end
+
+   context "through text_file" do
+     context ".flat_map" do
+       let(:file) { File.join("spec", "inputs", "numbers_0_100.txt") }
+       let(:numbers) { File.readlines(file).map(&:strip) }
+
+       def rdd(workers)
+         $sc.text_file(file, workers)
+       end
+
+       it_behaves_like "a flat mapping", nil
+       it_behaves_like "a flat mapping", 1
+       it_behaves_like "a flat mapping", rand(2..10)
+     end
+   end
+ end
data/spec/lib/group_spec.rb
@@ -0,0 +1,109 @@
+ require "spec_helper"
+
+ RSpec::shared_examples "a grouping by key" do |workers|
+   it "with #{workers || 'default'} worker" do
+     expect(rdd_result(workers)).to eql(result)
+   end
+ end
+
+ RSpec::shared_examples "a cogrouping by key" do |workers|
+   context "with #{workers || 'default'} worker" do
+     it ".group_with" do
+       rdd = rdd_1(workers).group_with(rdd_2(workers))
+       expect(rdd.collect_as_hash).to eql(result_12)
+     end
+
+     it ".cogroup" do
+       rdd = rdd_1(workers).cogroup(rdd_2(workers), rdd_3(workers))
+       expect(rdd.collect_as_hash).to eql(result_123)
+     end
+   end
+ end
+
+ RSpec::shared_examples "a grouping by" do |workers|
+   it "with #{workers || 'default'} worker" do
+     rdd = rdd_numbers(workers)
+     rdd = rdd.group_by(key_function1)
+
+     expect(rdd.collect_as_hash).to eql(numbers.group_by(&key_function1))
+
+     rdd = rdd_words(workers)
+     rdd = rdd.group_by(key_function2)
+
+     expect(rdd.collect_as_hash).to eql(words.group_by(&key_function2))
+   end
+ end
+
+ RSpec::describe "Spark::RDD" do
+
+   def make_result(*hashes)
+     _result = {}
+     hashes.each do |data|
+       data.each do |key, value|
+         _result[key] ||= []
+         _result[key] << value
+       end
+     end
+     _result
+   end
+
+   context ".group_by_key" do
+     let(:hash) { Generator.hash }
+     let(:result) { make_result(hash) }
+
+     def rdd_result(workers)
+       rdd = $sc.parallelize(hash)
+       rdd.group_by_key.collect_as_hash
+     end
+
+     it_behaves_like "a grouping by key", nil
+     it_behaves_like "a grouping by key", 1
+     it_behaves_like "a grouping by key", rand(2..10)
+   end
+
+   context "cogroup" do
+     let(:hash1) { Generator.hash }
+     let(:hash2) { Generator.hash }
+     let(:hash3) { Generator.hash }
+
+     let(:result_12) { make_result(hash1, hash2) }
+     let(:result_123) { make_result(hash1, hash2, hash3) }
+
+     def rdd_1(workers)
+       $sc.parallelize(hash1)
+     end
+
+     def rdd_2(workers)
+       $sc.parallelize(hash2)
+     end
+
+     def rdd_3(workers)
+       $sc.parallelize(hash3)
+     end
+
+     it_behaves_like "a cogrouping by key", nil
+     it_behaves_like "a cogrouping by key", 1
+     it_behaves_like "a cogrouping by key", rand(2..10)
+   end
+
+   context "group_by" do
+     let(:key_function1) { lambda{|x| x%2} }
+     let(:key_function2) { lambda{|x| x.size} }
+
+     let(:numbers) { Generator.numbers }
+     let(:words) { Generator.words }
+
+     def rdd_numbers(workers)
+       $sc.parallelize(numbers)
+     end
+
+     def rdd_words(workers)
+       $sc.parallelize(words)
+     end
+
+     it_behaves_like "a grouping by", nil
+     it_behaves_like "a grouping by", 1
+     it_behaves_like "a grouping by", rand(2..10)
+   end
+
+ end
data/spec/lib/helper_spec.rb
@@ -0,0 +1,19 @@
+ require "spec_helper"
+
+ RSpec::configure do |c|
+   c.include Spark::Helper::Parser
+ end
+
+ RSpec::describe Spark::Helper do
+
+   it "memory size" do
+     expect(to_memory_size("512mb")).to eql(524288.0)
+     expect(to_memory_size("1586 mb")).to eql(1624064.0)
+     expect(to_memory_size("3 MB")).to eql(3072.0)
+     expect(to_memory_size("9gb")).to eql(9437184.0)
+     expect(to_memory_size("9gb", "mb")).to eql(9216.0)
+     expect(to_memory_size("9mb", "gb")).to eql(0.01)
+     expect(to_memory_size("6652548796kb", "mb")).to eql(6496629.68)
+   end
+
+ end
data/spec/lib/key_spec.rb
@@ -0,0 +1,41 @@
+ require "spec_helper"
+
+ RSpec::shared_examples "a keying by" do |workers|
+   it "with #{workers || 'default'} worker" do
+     rdd = rdd_numbers(workers)
+     rdd = rdd.key_by(key_function1)
+
+     result = numbers.map{|item| [key_function1.call(item), item]}
+     expect(rdd.collect).to eql(result)
+
+     rdd = rdd_words(workers)
+     rdd = rdd.key_by(key_function2)
+
+     result = words.map{|item| [key_function2.call(item), item]}
+     expect(rdd.collect).to eql(result)
+   end
+ end
+
+ RSpec::describe "Spark::RDD" do
+
+   context "key_by" do
+     let(:key_function1) { lambda{|x| x.even?} }
+     let(:key_function2) { lambda{|x| x.include?("a")} }
+
+     let(:numbers) { Generator.numbers }
+     let(:words) { Generator.words }
+
+     def rdd_numbers(workers)
+       $sc.parallelize(numbers)
+     end
+
+     def rdd_words(workers)
+       $sc.parallelize(words)
+     end
+
+     it_behaves_like "a keying by", nil
+     it_behaves_like "a keying by", 1
+     it_behaves_like "a keying by", rand(2..10)
+   end
+
+ end
data/spec/lib/manipulation_spec.rb
@@ -0,0 +1,122 @@
+ require "spec_helper"
+
+ RSpec::describe "Spark::RDD" do
+   let(:numbers) { 1..100 }
+   let(:rand_numbers) { Generator.numbers }
+
+   it ".glom" do
+     rdd = $sc.parallelize(numbers, 1).glom
+     expect(rdd.collect).to eql([numbers.to_a])
+
+     ser = Spark::Serializer.build { __batched__(__marshal__, 1) }
+
+     rdd = $sc.parallelize(numbers, 5, ser).glom
+     expect(rdd.collect).to eql(numbers.each_slice(20).to_a)
+   end
+
+   it ".coalesce" do
+     rdd = $sc.parallelize(numbers, 5)
+
+     rdd2 = rdd.glom
+     expect(rdd2.collect.size).to eql(5)
+
+     rdd3 = rdd.coalesce(4).glom
+     expect(rdd3.collect.size).to eql(4)
+   end
+
+   it ".distinct" do
+     rdd = $sc.parallelize(rand_numbers, 5)
+     rdd = rdd.distinct
+     expect(rdd.collect.sort).to eql(rand_numbers.uniq.sort)
+
+     rdd = $sc.parallelize(numbers, 5)
+     rdd = rdd.map(lambda{|x| 1})
+     rdd = rdd.distinct
+     expect(rdd.collect).to eql([1])
+   end
+
+   context ".union" do
+     it "classic method" do
+       rdd = $sc.parallelize(numbers, 5)
+       rdd = rdd.union(rdd).collect
+
+       expect(rdd.collect.sort).to eql((numbers.to_a+numbers.to_a).sort)
+     end
+
+     it "with a different serializer" do
+       rdd1 = $sc.parallelize(numbers, 1, Spark::Serializer.build{ __batched__(__marshal__) })
+       rdd2 = $sc.parallelize(numbers, 1, Spark::Serializer.build{ __batched__(__oj__) })
+
+       expect { rdd1.union(rdd2).collect }.to_not raise_error
+     end
+
+     it "as operator" do
+       rdd1 = $sc.parallelize(numbers)
+       rdd2 = $sc.parallelize(rand_numbers)
+
+       expect((rdd1+rdd2).sum).to eql((numbers.to_a+rand_numbers).reduce(:+))
+     end
+   end
+
+   it ".compact" do
+     data = [nil, nil, 0, 0, 1, 2, nil, 6]
+     result = data.compact
+     ser = Spark::Serializer.build { __batched__(__marshal__, 1) }
+
+     rdd = $sc.parallelize(data, 1).compact
+     expect(rdd.collect).to eql(result)
+
+     rdd = $sc.parallelize(data, 5, ser).compact
+     expect(rdd.collect).to eql(result)
+
+     rdd = $sc.parallelize(data, 1, ser).compact
+     expect(rdd.collect).to eql(result)
+   end
+
+   it ".intersection" do
+     data1 = [0,1,2,3,4,5,6,7,8,9,10]
+     data2 = [5,6,7,8,9,10,11,12,13,14,15]
+
+     rdd1 = $sc.parallelize(data1)
+     rdd2 = $sc.parallelize(data2)
+
+     expect(rdd1.intersection(rdd2).collect.sort).to eql(data1 & data2)
+   end
+
+   it ".shuffle" do
+     data = Generator.numbers
+     rdd = $sc.parallelize(data)
+
+     expect(rdd.shuffle.collect).to_not eql(data)
+   end
+
+   context ".cartesian" do
+     let(:data1) { Generator.numbers(100) }
+     let(:data2) { Generator.numbers(100) }
+     let(:result) { data1.product(data2).map(&:to_s).sort }
+
+     it "unbatched" do
+       ser = Spark::Serializer.build { __batched__(__marshal__, 1) }
+
+       rdd1 = $sc.parallelize(data1, 2, ser)
+       rdd2 = $sc.parallelize(data2, 2, ser)
+
+       rdd = rdd1.cartesian(rdd2).map(lambda{|x| x.to_s})
+
+       expect(rdd.collect.sort).to eql(result)
+     end
+
+     it "batched" do
+       ser1 = Spark::Serializer.build { __batched__(__marshal__, rand(4..10)) }
+       ser2 = Spark::Serializer.build { __batched__(__marshal__, rand(4..10)) }
+
+       rdd1 = $sc.parallelize(data1, 2, ser1)
+       rdd2 = $sc.parallelize(data2, 2, ser2)
+
+       rdd = rdd1.cartesian(rdd2).map(lambda{|x| x.to_s})
+
+       expect(rdd.collect.sort).to eql(result)
+     end
+   end
+
+ end