consadole_aggregator 0.0.4 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. data/.document +5 -0
  2. data/.rspec +1 -0
  3. data/Gemfile +10 -0
  4. data/Gemfile.lock +37 -0
  5. data/LICENSE.txt +20 -0
  6. data/README.rdoc +33 -0
  7. data/Rakefile +41 -22
  8. data/VERSION +1 -1
  9. data/account.yaml +6 -0
  10. data/bin/consadole_aggregator +47 -0
  11. data/consadole_aggregator.gemspec +193 -152
  12. data/db/.gitignore +0 -0
  13. data/lib/consadole_aggregator/helper.rb +20 -0
  14. data/lib/consadole_aggregator/live/timeline.rb +13 -0
  15. data/lib/consadole_aggregator/live.rb +79 -0
  16. data/lib/consadole_aggregator/news.rb +165 -0
  17. data/lib/consadole_aggregator.rb +5 -19
  18. data/log/.gitignore +0 -0
  19. data/spec/consadole_aggregator/helper_spec.rb +27 -0
  20. data/spec/consadole_aggregator/live/timeline_spec.rb +53 -0
  21. data/spec/consadole_aggregator/live_spec.rb +139 -0
  22. data/spec/consadole_aggregator/news_spec.rb +156 -0
  23. data/spec/ext/asahi.txt +1068 -0
  24. data/spec/ext/consaburn.txt +660 -0
  25. data/spec/ext/consaclub.txt +677 -0
  26. data/spec/ext/consadolenews.txt +251 -0
  27. data/spec/ext/consadolephotos.txt +5 -0
  28. data/spec/ext/consadolesponsornews.txt +229 -0
  29. data/spec/ext/forzaconsadole.txt +1038 -0
  30. data/spec/ext/hochiyomiuri.txt +783 -0
  31. data/spec/ext/jsgoalnews.txt +421 -0
  32. data/spec/ext/jsgoalphotos.txt +322 -0
  33. data/spec/ext/nikkansports.txt +478 -0
  34. data/spec/spec.opts +1 -1
  35. data/spec/spec_helper.rb +11 -4
  36. metadata +240 -157
  37. data/.gitignore +0 -3
  38. data/lib/consadole_aggregator/live/builder.rb +0 -44
  39. data/lib/consadole_aggregator/live/parser.rb +0 -26
  40. data/lib/consadole_aggregator/nikkan_sports.rb +0 -61
  41. data/spec/consadole_aggregator_spec.rb +0 -2
  42. data/spec/nikkan_sports/consadole.rdf +0 -478
  43. data/spec/nikkan_sports/nikkan_sports_spec.rb +0 -62
  44. data/spec/nikkan_sports/p-sc-tp0-20091225-579346.html +0 -1547
  45. data/spec/nikkan_sports/p-sc-tp0-20100204-592466.html +0 -1538
  46. /data/spec/{timeline → ext/live}/s674.html +0 -0
  47. /data/spec/{timeline → ext/live}/s674.html.1 +0 -0
  48. /data/spec/{timeline → ext/live}/s674.html.10 +0 -0
  49. /data/spec/{timeline → ext/live}/s674.html.100 +0 -0
  50. /data/spec/{timeline → ext/live}/s674.html.101 +0 -0
  51. /data/spec/{timeline → ext/live}/s674.html.102 +0 -0
  52. /data/spec/{timeline → ext/live}/s674.html.103 +0 -0
  53. /data/spec/{timeline → ext/live}/s674.html.104 +0 -0
  54. /data/spec/{timeline → ext/live}/s674.html.105 +0 -0
  55. /data/spec/{timeline → ext/live}/s674.html.106 +0 -0
  56. /data/spec/{timeline → ext/live}/s674.html.107 +0 -0
  57. /data/spec/{timeline → ext/live}/s674.html.108 +0 -0
  58. /data/spec/{timeline → ext/live}/s674.html.109 +0 -0
  59. /data/spec/{timeline → ext/live}/s674.html.11 +0 -0
  60. /data/spec/{timeline → ext/live}/s674.html.110 +0 -0
  61. /data/spec/{timeline → ext/live}/s674.html.111 +0 -0
  62. /data/spec/{timeline → ext/live}/s674.html.112 +0 -0
  63. /data/spec/{timeline → ext/live}/s674.html.113 +0 -0
  64. /data/spec/{timeline → ext/live}/s674.html.114 +0 -0
  65. /data/spec/{timeline → ext/live}/s674.html.115 +0 -0
  66. /data/spec/{timeline → ext/live}/s674.html.116 +0 -0
  67. /data/spec/{timeline → ext/live}/s674.html.117 +0 -0
  68. /data/spec/{timeline → ext/live}/s674.html.118 +0 -0
  69. /data/spec/{timeline → ext/live}/s674.html.119 +0 -0
  70. /data/spec/{timeline → ext/live}/s674.html.12 +0 -0
  71. /data/spec/{timeline → ext/live}/s674.html.120 +0 -0
  72. /data/spec/{timeline → ext/live}/s674.html.13 +0 -0
  73. /data/spec/{timeline → ext/live}/s674.html.14 +0 -0
  74. /data/spec/{timeline → ext/live}/s674.html.15 +0 -0
  75. /data/spec/{timeline → ext/live}/s674.html.16 +0 -0
  76. /data/spec/{timeline → ext/live}/s674.html.17 +0 -0
  77. /data/spec/{timeline → ext/live}/s674.html.18 +0 -0
  78. /data/spec/{timeline → ext/live}/s674.html.19 +0 -0
  79. /data/spec/{timeline → ext/live}/s674.html.2 +0 -0
  80. /data/spec/{timeline → ext/live}/s674.html.20 +0 -0
  81. /data/spec/{timeline → ext/live}/s674.html.21 +0 -0
  82. /data/spec/{timeline → ext/live}/s674.html.22 +0 -0
  83. /data/spec/{timeline → ext/live}/s674.html.23 +0 -0
  84. /data/spec/{timeline → ext/live}/s674.html.24 +0 -0
  85. /data/spec/{timeline → ext/live}/s674.html.25 +0 -0
  86. /data/spec/{timeline → ext/live}/s674.html.26 +0 -0
  87. /data/spec/{timeline → ext/live}/s674.html.27 +0 -0
  88. /data/spec/{timeline → ext/live}/s674.html.28 +0 -0
  89. /data/spec/{timeline → ext/live}/s674.html.29 +0 -0
  90. /data/spec/{timeline → ext/live}/s674.html.3 +0 -0
  91. /data/spec/{timeline → ext/live}/s674.html.30 +0 -0
  92. /data/spec/{timeline → ext/live}/s674.html.31 +0 -0
  93. /data/spec/{timeline → ext/live}/s674.html.32 +0 -0
  94. /data/spec/{timeline → ext/live}/s674.html.33 +0 -0
  95. /data/spec/{timeline → ext/live}/s674.html.34 +0 -0
  96. /data/spec/{timeline → ext/live}/s674.html.35 +0 -0
  97. /data/spec/{timeline → ext/live}/s674.html.36 +0 -0
  98. /data/spec/{timeline → ext/live}/s674.html.37 +0 -0
  99. /data/spec/{timeline → ext/live}/s674.html.38 +0 -0
  100. /data/spec/{timeline → ext/live}/s674.html.39 +0 -0
  101. /data/spec/{timeline → ext/live}/s674.html.4 +0 -0
  102. /data/spec/{timeline → ext/live}/s674.html.40 +0 -0
  103. /data/spec/{timeline → ext/live}/s674.html.41 +0 -0
  104. /data/spec/{timeline → ext/live}/s674.html.42 +0 -0
  105. /data/spec/{timeline → ext/live}/s674.html.43 +0 -0
  106. /data/spec/{timeline → ext/live}/s674.html.44 +0 -0
  107. /data/spec/{timeline → ext/live}/s674.html.45 +0 -0
  108. /data/spec/{timeline → ext/live}/s674.html.46 +0 -0
  109. /data/spec/{timeline → ext/live}/s674.html.47 +0 -0
  110. /data/spec/{timeline → ext/live}/s674.html.48 +0 -0
  111. /data/spec/{timeline → ext/live}/s674.html.49 +0 -0
  112. /data/spec/{timeline → ext/live}/s674.html.5 +0 -0
  113. /data/spec/{timeline → ext/live}/s674.html.50 +0 -0
  114. /data/spec/{timeline → ext/live}/s674.html.51 +0 -0
  115. /data/spec/{timeline → ext/live}/s674.html.52 +0 -0
  116. /data/spec/{timeline → ext/live}/s674.html.53 +0 -0
  117. /data/spec/{timeline → ext/live}/s674.html.54 +0 -0
  118. /data/spec/{timeline → ext/live}/s674.html.55 +0 -0
  119. /data/spec/{timeline → ext/live}/s674.html.56 +0 -0
  120. /data/spec/{timeline → ext/live}/s674.html.57 +0 -0
  121. /data/spec/{timeline → ext/live}/s674.html.58 +0 -0
  122. /data/spec/{timeline → ext/live}/s674.html.59 +0 -0
  123. /data/spec/{timeline → ext/live}/s674.html.6 +0 -0
  124. /data/spec/{timeline → ext/live}/s674.html.60 +0 -0
  125. /data/spec/{timeline → ext/live}/s674.html.61 +0 -0
  126. /data/spec/{timeline → ext/live}/s674.html.62 +0 -0
  127. /data/spec/{timeline → ext/live}/s674.html.63 +0 -0
  128. /data/spec/{timeline → ext/live}/s674.html.64 +0 -0
  129. /data/spec/{timeline → ext/live}/s674.html.65 +0 -0
  130. /data/spec/{timeline → ext/live}/s674.html.66 +0 -0
  131. /data/spec/{timeline → ext/live}/s674.html.67 +0 -0
  132. /data/spec/{timeline → ext/live}/s674.html.68 +0 -0
  133. /data/spec/{timeline → ext/live}/s674.html.69 +0 -0
  134. /data/spec/{timeline → ext/live}/s674.html.7 +0 -0
  135. /data/spec/{timeline → ext/live}/s674.html.70 +0 -0
  136. /data/spec/{timeline → ext/live}/s674.html.71 +0 -0
  137. /data/spec/{timeline → ext/live}/s674.html.72 +0 -0
  138. /data/spec/{timeline → ext/live}/s674.html.73 +0 -0
  139. /data/spec/{timeline → ext/live}/s674.html.74 +0 -0
  140. /data/spec/{timeline → ext/live}/s674.html.75 +0 -0
  141. /data/spec/{timeline → ext/live}/s674.html.76 +0 -0
  142. /data/spec/{timeline → ext/live}/s674.html.77 +0 -0
  143. /data/spec/{timeline → ext/live}/s674.html.78 +0 -0
  144. /data/spec/{timeline → ext/live}/s674.html.79 +0 -0
  145. /data/spec/{timeline → ext/live}/s674.html.8 +0 -0
  146. /data/spec/{timeline → ext/live}/s674.html.80 +0 -0
  147. /data/spec/{timeline → ext/live}/s674.html.81 +0 -0
  148. /data/spec/{timeline → ext/live}/s674.html.82 +0 -0
  149. /data/spec/{timeline → ext/live}/s674.html.83 +0 -0
  150. /data/spec/{timeline → ext/live}/s674.html.84 +0 -0
  151. /data/spec/{timeline → ext/live}/s674.html.85 +0 -0
  152. /data/spec/{timeline → ext/live}/s674.html.86 +0 -0
  153. /data/spec/{timeline → ext/live}/s674.html.87 +0 -0
  154. /data/spec/{timeline → ext/live}/s674.html.88 +0 -0
  155. /data/spec/{timeline → ext/live}/s674.html.89 +0 -0
  156. /data/spec/{timeline → ext/live}/s674.html.9 +0 -0
  157. /data/spec/{timeline → ext/live}/s674.html.90 +0 -0
  158. /data/spec/{timeline → ext/live}/s674.html.91 +0 -0
  159. /data/spec/{timeline → ext/live}/s674.html.92 +0 -0
  160. /data/spec/{timeline → ext/live}/s674.html.93 +0 -0
  161. /data/spec/{timeline → ext/live}/s674.html.94 +0 -0
  162. /data/spec/{timeline → ext/live}/s674.html.95 +0 -0
  163. /data/spec/{timeline → ext/live}/s674.html.96 +0 -0
  164. /data/spec/{timeline → ext/live}/s674.html.97 +0 -0
  165. /data/spec/{timeline → ext/live}/s674.html.98 +0 -0
  166. /data/spec/{timeline → ext/live}/s674.html.99 +0 -0
@@ -0,0 +1,20 @@
1
+ module ConsadoleAggregator
2
+ class Helper
3
+ def self.concat text, opt=Hash.new('')
4
+ base = "#{text} #{opt[:url]} #{opt[:hashtag]}".squeeze(' ').rstrip
5
+ if base.size > 140
6
+ over_size = base.size - 140
7
+ concat(omit(text, over_size), opt)
8
+ else
9
+ base
10
+ end
11
+ end
12
+
13
+ private
14
+ def self.omit text, over_size
15
+ truncated = text.slice(0...-over_size)
16
+ truncated[-3..-1] = '...'
17
+ truncated
18
+ end
19
+ end
20
+ end
@@ -0,0 +1,13 @@
1
+ # -*- coding: utf-8 -*-
2
+ module ConsadoleAggregator::Live
3
+ Timeline = Struct.new(:time, :post)
4
+ class Timeline
5
+ def self.parse line
6
+ return nil if line.nil? || line.empty? || line =~ /(<前半>)|(<後半>)/
7
+ Timeline.new(*line.split(' '))
8
+ end
9
+ def to_s
10
+ ('%s %s'%[time, post]).squeeze.rstrip
11
+ end
12
+ end
13
+ end
@@ -0,0 +1,79 @@
1
+ # -*- coding: utf-8 -*-
2
+ require 'logger'
3
+ require 'uri'
4
+ require 'kconv'
5
+ require 'nokogiri'
6
+ require 'net/http'
7
+ require_relative 'live/timeline.rb'
8
+
9
+ module ConsadoleAggregator
10
+ module Live
11
+ BASE_URI = URI.parse('http://www.consadole-sapporo.jp/view/s674.html')
12
+
13
+ def self.reserve reservation_time=nil, opt ={}
14
+ Live.new(reservation_time, opt)
15
+ end
16
+
17
+ def self.get_resource
18
+ Net::HTTP.get(BASE_URI).toutf8
19
+ end
20
+
21
+ def self.parse
22
+ doc = Nokogiri::HTML.parse(get_resource)
23
+ doc.search('hr + p').last.inner_html.split(/<br>|\n/).reverse.each_with_object([]) do |line, memo|
24
+ timeline = Timeline.parse line
25
+ memo << timeline if timeline
26
+ end
27
+ end
28
+
29
+ class Live
30
+ attr_reader :reservation_time, :posted, :times, :wait_sec
31
+
32
+ def initialize reservation_time=nil, opt ={}
33
+ @reservation_time = reservation_time
34
+ @posted = []
35
+ @wait_sec = opt[:wait_sec] || 30
36
+ @times = opt[:times] || (60/@wait_sec)*120 # サッカーは120分あれば終わる
37
+ @logger = opt[:logger] || Logger.new(File.expand_path(File.dirname(__FILE__) + '/../../log/live.log'))
38
+ end
39
+
40
+ def execute &block
41
+ be_daemonize
42
+ wait_initial
43
+ @logger.info 'start of loop'
44
+ @times.times do |i|
45
+ @logger.debug "#{i} times"
46
+ update &block rescue @logger.error $!
47
+ wait_interval
48
+ end
49
+ @logger.info 'end of loop'
50
+ end
51
+
52
+ def wait_initial
53
+ return unless @reservation_time
54
+ diff_sec = @reservation_time - Time.now
55
+ wait_sec = diff_sec > 0 ? diff_sec : 0
56
+ @logger.info "initial wait #{wait_sec} seconds"
57
+ sleep wait_sec
58
+ end
59
+
60
+ def wait_interval
61
+ sleep @wait_sec
62
+ end
63
+
64
+ def update
65
+ new_timeline = ConsadoleAggregator::Live.parse - @posted
66
+ new_timeline.each do |timeline|
67
+ @logger.debug timeline
68
+ yield timeline if block_given?
69
+ @posted << timeline
70
+ end
71
+ end
72
+
73
+ private
74
+ def be_daemonize
75
+ Process.daemon
76
+ end
77
+ end
78
+ end
79
+ end
@@ -0,0 +1,165 @@
1
+ # -*- coding: utf-8 -*-
2
+ require 'logger'
3
+ require 'rss'
4
+ require 'uri'
5
+ require 'kconv'
6
+ require 'net/http'
7
+ require 'yaml'
8
+ require 'nokogiri'
9
+
10
+ module ConsadoleAggregator
11
+ module Aggregatable
12
+ def get_new_articles
13
+ get_resource = self.class.get_resource
14
+ parse_list = self.class.parse_list
15
+ parse_article = self.class.parse_article
16
+ raise NotImplementedError unless get_resource && parse_list && parse_article
17
+ list_url = get_resource.call
18
+ article_urls = parse_list.call(list_url)
19
+ article_urls.each_with_object([]) do |article_url, memo|
20
+ article = parse_article.call(article_url)
21
+ memo.push(article) if article && !get_strage.include?(article)
22
+ end
23
+ end
24
+
25
+ def update
26
+ get_new_articles.each do |article|
27
+ begin
28
+ yield article if block_given?
29
+ @strage << article
30
+ rescue
31
+ @logger.error $!
32
+ end
33
+ end
34
+ save_strage
35
+ end
36
+
37
+ def get_strage
38
+ @strage ||= YAML.load_file(build_strage_path) || [] # fix when YAML.load_file is nil
39
+ rescue
40
+ @strage = []
41
+ end
42
+
43
+ def save_strage
44
+ YAML.dump(@strage, File.new(build_strage_path, 'w'))
45
+ end
46
+
47
+ def build_strage_path
48
+ class_name = /([^:]+)$/.match(self.class.to_s)[1]
49
+ File.expand_path "db/#{class_name}.yaml"
50
+ end
51
+
52
+ # define class method's
53
+ def self.included(mod)
54
+ mod.extend ClassMethods
55
+ end
56
+
57
+ module ClassMethods
58
+ attr_accessor :get_resource, :parse_list, :parse_article
59
+ end
60
+ end
61
+
62
+ module News
63
+ def self.get_resource(url_path)
64
+ Net::HTTP.get(URI.parse(url_path)).toutf8
65
+ end
66
+
67
+ def self.trace(url_path, limit=10)
68
+ raise ArgumentError, 'http redirect too deep' if limit == 0
69
+
70
+ case response = Net::HTTP.get_response(URI.parse(url_path))
71
+ when Net::HTTPSuccess then url_path
72
+ when Net::HTTPRedirection then trace(response['Location'], limit - 1)
73
+ else
74
+ response.error!
75
+ end
76
+ end
77
+
78
+ {
79
+ Nikkansports:
80
+ [
81
+ ->{ get_resource('http://www.nikkansports.com/rss/soccer/jleague/consadole.rdf') },
82
+ ->(list){ RSS::Parser.parse(list, false).items.map{ |e| { url:e.link, title:e.title } }.reverse },
83
+ ->(article){ article }
84
+ ],
85
+ Hochiyomiuri:
86
+ [
87
+ ->{ get_resource('http://hochi.yomiuri.co.jp/hokkaido/soccer/index.htm') },
88
+ ->(list){ Nokogiri::HTML(list).search('div.list1 > ul > li a').reverse },
89
+ ->(article){ { url:"http://hochi.yomiuri.co.jp/hokkaido/soccer#{article['href']}", title:article.text } if article.text =~ /…札幌$/ }
90
+ ],
91
+ Asahi:
92
+ [
93
+ ->{ get_resource('http://mytown.asahi.com/hokkaido/newslist.php?d_id=0100019') },
94
+ ->(list){ Nokogiri::HTML(list).search('ul.list > li a').reverse },
95
+ ->(article){ { url:"http://mytown.asahi.com/hokkaido/#{article['href']}", title:article.text } }
96
+ ],
97
+ Forzaconsadole:
98
+ [
99
+ ->{ get_resource('http://www.hokkaido-np.co.jp/news/e_index/?g=consadole') },
100
+ ->(list){ Nokogiri::HTML(list).search('ul.iSwBox > li > a').reverse },
101
+ ->(article){ { url:article['href'], title:article.text } }
102
+ ],
103
+ Consaburn:
104
+ [
105
+ ->{ get_resource('http://www.hokkaido-np.co.jp/cont/consa-burn/index.html') },
106
+ ->(list){ Nokogiri::HTML(list).search('ul#news_list > li > a').reverse },
107
+ ->(article){ { url:article['href'], title:article.text } }
108
+ ],
109
+ Consaclub:
110
+ [
111
+ ->{ get_resource('http://www.hokkaido-np.co.jp/cont/consa-club/index.html') },
112
+ ->(list){ Nokogiri::HTML(list).search('ul#news_list > li > a').reverse },
113
+ ->(article){ { url:article['href'], title:article.text } }
114
+ ],
115
+ Consadolenews:
116
+ [
117
+ ->{ get_resource('http://www.consadole-sapporo.jp/news/diary.cgi') },
118
+ ->(list){ Nokogiri::HTML(list).search('table.frametable > tr a').reverse },
119
+ ->(article){ { url:article['href'], title:article.text } }
120
+ ],
121
+ Consadolesponsornews:
122
+ [
123
+ ->{ get_resource('http://www.consadole-sapporo.jp/snews/diary.cgi') },
124
+ ->(list){ Nokogiri::HTML(list).search('table.frametable > tr a').reverse },
125
+ ->(article){ { url:article['href'], title:article.text } }
126
+ ],
127
+ Consadolephotos:
128
+ [
129
+ ->{ get_resource('http://www.consadole-sapporo.jp/comment.txt') },
130
+ ->(list){ list.split("\n").reverse },
131
+ ->(article){
132
+ photo = article.match(/^&?text(?<number>\d\d)=(?<title>.+)/)
133
+ { url:"http://www.consadole-sapporo.jp/img/#{photo[:number]}.jpg", title:photo[:title] }
134
+ }
135
+ ],
136
+ Jsgoalnews:
137
+ [
138
+ ->{ get_resource('http://feeds.feedburner.com/jsgoal/jsgoal?format=xml') },
139
+ ->(list){
140
+ RSS::Parser.parse(list, false).items.each_with_object([]){ |e, memo|
141
+ memo << { url:trace(e.link), title:e.title } if e.title.include?('札幌')
142
+ }.reverse },
143
+ ->(article){ article }
144
+ ],
145
+ Jsgoalphotos:
146
+ [
147
+ ->{ get_resource('http://feeds.feedburner.com/jsgoal/photo?format=xml') },
148
+ ->(list){
149
+ RSS::Parser.parse(list, false).items.each_with_object([]){ |e, memo|
150
+ memo << { url:trace(e.link), title:e.title } if e.title.include?('札幌')
151
+ }.reverse },
152
+ ->(article){ article }
153
+ ],
154
+ }.each do |k,v|
155
+ klass = Class.new do
156
+ include Aggregatable
157
+ @get_resource, @parse_list, @parse_article = *v
158
+ def initialize logger=nil
159
+ @logger = logger || Logger.new(File.expand_path(File.dirname(__FILE__) + '/../../log/news.log'))
160
+ end
161
+ end
162
+ const_set(k, klass)
163
+ end
164
+ end
165
+ end
@@ -1,21 +1,7 @@
1
- # -*- coding: utf-8 -*-
2
- require 'net/http'
3
- require 'consadole_aggregator/nikkan_sports'
4
- require 'consadole_aggregator/live'
1
+ require 'logger'
2
+ require_relative 'consadole_aggregator/helper.rb'
3
+ require_relative 'consadole_aggregator/live.rb'
4
+ require_relative 'consadole_aggregator/news.rb'
5
5
 
6
- if RUBY_VERSION < '1.9'
7
- # strptime は1.9 features なので、1.8 対応のモンキーパッチ
8
- class Time
9
- def Time.strptime(base, format)
10
- ary = base.split(/[^\d]/).delete_if{ |x| x.empty? }
11
- case ary.size
12
- when 3
13
- Time.mktime(Time.now.year, Time.now.month, ary[0], ary[1], ary[2], 0, 0)
14
- when 5
15
- Time.mktime(ary[0], ary[1], ary[2], ary[3], ary[4], 0, 0)
16
- else
17
- Time.now
18
- end
19
- end
20
- end
6
+ module ConsadoleAggregator
21
7
  end
data/log/.gitignore ADDED
File without changes
@@ -0,0 +1,27 @@
1
+ # -*- coding: utf-8 -*-
2
+ require_relative '../spec_helper'
3
+
4
+ describe ConsadoleAggregator::Helper do
5
+ describe :concatenate do
6
+ context 'with argument 140 chars' do
7
+ subject { ConsadoleAggregator::Helper.concat('い' * 140) }
8
+ it { should have(140).item }
9
+ it { should == 'い' * 140 }
10
+ end
11
+ context 'with argument 140 chars and 18 chars url' do
12
+ subject { ConsadoleAggregator::Helper.concat('ろ' * 140, url:'http://example.jp/') }
13
+ it { should have(140).item }
14
+ it { should be_end_with 'ろ... http://example.jp/' }
15
+ end
16
+ context 'with argument 140 chars and 10 chars hashtag' do
17
+ subject { ConsadoleAggregator::Helper.concat('は' * 140, hashtag:'#consadole') }
18
+ it { should have(140).item }
19
+ it { should be_end_with 'は... #consadole' }
20
+ end
21
+ context 'with argument 140 chars and 18 chars url and 10 chars hashtag' do
22
+ subject { ConsadoleAggregator::Helper.concat('に' * 140, url:'http://example.jp/', hashtag:'#consadole') }
23
+ it { should have(140).item }
24
+ it { should be_end_with 'に... http://example.jp/ #consadole' }
25
+ end
26
+ end
27
+ end
@@ -0,0 +1,53 @@
1
+ # -*- coding: utf-8 -*-
2
+ require_relative '../../spec_helper'
3
+
4
+ include ConsadoleAggregator::Live
5
+
6
+ describe Timeline do
7
+ describe '.parse' do
8
+ context 'given nil' do
9
+ subject{ Timeline.parse(nil) }
10
+ it{ should be_nil }
11
+ end
12
+ context 'given ""' do
13
+ subject{ Timeline.parse("") }
14
+ it{ should be_nil }
15
+ end
16
+ context 'given "&lt;前半&gt;"' do
17
+ subject{ Timeline.parse('&lt;前半&gt;') }
18
+ it{ should be_nil }
19
+ end
20
+ context 'given "1分 右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア"' do
21
+ subject{ Timeline.parse('1分 右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア') }
22
+ it{ should eql Timeline.new('1分', '右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア') }
23
+ end
24
+ context 'given "5分"' do
25
+ subject{ Timeline.parse('5分') }
26
+ it{ should eql Timeline.new('5分', nil) }
27
+ end
28
+ end
29
+ describe '#to_s' do
30
+ before do
31
+ @timeline = Timeline.new
32
+ end
33
+ context 'when empty' do
34
+ subject{ @timeline.to_s }
35
+ it{ should == '' }
36
+ end
37
+ context 'when only time' do
38
+ before do
39
+ @timeline.time = '1分'
40
+ end
41
+ subject{ @timeline.to_s }
42
+ it{ should == '1分' }
43
+ end
44
+ context 'when filled time and post' do
45
+ before do
46
+ @timeline.time = '1分'
47
+ @timeline.post = '右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア'
48
+ end
49
+ subject{ @timeline.to_s }
50
+ it{ should == '1分 右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア' }
51
+ end
52
+ end
53
+ end
@@ -0,0 +1,139 @@
1
+ # -*- coding: utf-8 -*-
2
+ require_relative '../spec_helper'
3
+
4
+ include ConsadoleAggregator
5
+
6
+ describe ConsadoleAggregator do
7
+ describe Live do
8
+ describe '.get_resource' do
9
+ it 'should exec Net::HTTP.get with Live::BASE_URI' do
10
+ Net::HTTP.should_receive(:get).with(Live::BASE_URI).and_return(File.read(File.dirname(__FILE__) + '/../ext/live/s674.html'))
11
+ Live.get_resource
12
+ end
13
+ end
14
+
15
+ describe '.parse' do
16
+ context 'when start of game' do
17
+ before { Live.stub!(:get_resource).and_return(File.read(File.dirname(__FILE__) + '/../ext/live/s674.html').toutf8) }
18
+ subject{ Live.parse }
19
+ it{ should have(3).items }
20
+ end
21
+ context 'when end of game' do
22
+ before { Live.stub!(:get_resource).and_return(File.read(File.dirname(__FILE__) + '/../ext/live/s674.html.120').toutf8) }
23
+ describe 'first TimeLine' do
24
+ subject{ Live.parse.first }
25
+ its(:time){ should == '試合開始' }
26
+ its(:post){ should == '札幌ボールでキックオフ' }
27
+ end
28
+ describe 'second TimeLine' do
29
+ subject{ Live.parse[1] }
30
+ its(:time){ should == '1分' }
31
+ its(:post){ should == '右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア' }
32
+ end
33
+ describe 'last TimeLine' do
34
+ subject{ Live.parse.last }
35
+ its(:time){ should == '試合終了' }
36
+ its(:post){ should == 'ロスタイムも余裕のプレーで相手の攻撃を許さず、3試合連続完封で3連勝を飾る' }
37
+ end
38
+ end
39
+ end
40
+
41
+ describe '.reserve' do
42
+ context 'given Time' do
43
+ it 'give constructor with Time ' do
44
+ Live::Live.should_receive(:new).with(Time.parse('2011-02-14 13:00'), {})
45
+ Live.reserve(Time.parse('2011-02-14 13:00'))
46
+ end
47
+ end
48
+ end
49
+ end
50
+
51
+ describe Live::Live do
52
+ describe '#execute' do
53
+ before { Live.stub!(:get_resource).and_return(File.read(File.dirname(__FILE__) + '/../ext/live/s674.html.120').toutf8) }
54
+ context 'when normal update' do
55
+ subject{ Live::Live.new }
56
+ it 'should to be be_daemonize' do
57
+ subject.should_receive(:be_daemonize).ordered
58
+ subject.should_receive(:wait_initial).ordered
59
+ subject.should_receive(:update).ordered.exactly(240).times
60
+ subject.should_receive(:sleep).with(30).exactly(240).times
61
+ subject.execute
62
+ end
63
+ end
64
+ context 'when raise Exception' do
65
+ before do
66
+ @live = Live::Live.new(nil, { times:1 })
67
+ @live.stub!(:be_daemonize)
68
+ @live.stub!(:wait_initial)
69
+ end
70
+ subject{ @live }
71
+ it 'should log exception and sleep' do
72
+ subject.should_receive(:sleep).once
73
+ subject.execute{ |timeline| raise }
74
+ end
75
+ end
76
+ end
77
+
78
+ describe '#wait_initial' do
79
+ context 'when reservation_time is nil' do
80
+ subject{ Live::reserve }
81
+ it 'not sleep' do
82
+ subject.should_not_receive(:sleep)
83
+ subject.wait_initial
84
+ end
85
+ end
86
+ context 'given 10 hours later' do
87
+ before { Time.stub!(:now).and_return(Time.parse('2011-03-05 04:00')) }
88
+ subject{ Live.reserve(Time.parse('2011-03-05 14:00')) }
89
+ it 'sleep 36000 sec' do
90
+ subject.should_receive(:sleep).with(1.0*60*60*10)
91
+ subject.wait_initial
92
+ end
93
+ end
94
+ context 'given past time' do
95
+ before { Time.stub!(:now).and_return(Time.parse('2011-03-05 14:01')) }
96
+ subject{ Live.reserve(Time.parse('2011-03-05 14:00')) }
97
+ it 'not sleep' do
98
+ subject.should_receive(:sleep).with(0)
99
+ subject.wait_initial
100
+ end
101
+ end
102
+ end
103
+
104
+ describe '#update' do
105
+ before do
106
+ @first_timeline =
107
+ [
108
+ Live::Timeline.parse('1分 右サイドからボールをつながれ攻撃を仕掛けられるが札幌DFが落ち着いてクリア'),
109
+ Live::Timeline.parse('2分 左サイドキリノのパスカットから攻撃を仕掛けるがシュートまでは持ち込めず'),
110
+ Live::Timeline.parse('3分 ゴール前ほぼ正面やや遠めのFKを上里が直接狙うが湘南DFの壁に当たる'),
111
+ ]
112
+ end
113
+ context 'when first time' do
114
+ before { Live.stub!(:parse).and_return(@first_timeline) }
115
+ subject{ Live::Live.new }
116
+ it{ expect{ subject.update }.to change{ subject.posted.dup }.from([]).to(@first_timeline) }
117
+ end
118
+ context 'when second time' do
119
+ before do
120
+ @second_timeline = @first_timeline.clone.push(Live::Timeline.parse('3分 右サイドからのクロスに阿部(湘南)がヘッドであわせるがGK高原がキャッチ'))
121
+ Live.stub!(:parse).and_return(@first_timeline, @second_timeline)
122
+ @live = Live::Live.new
123
+ @live.update
124
+ end
125
+ subject{ @live }
126
+ it { expect { subject.update }.to change{ subject.posted.dup }.from(@first_timeline).to(@second_timeline) }
127
+ end
128
+ context 'given block' do
129
+ subject{ Live::Live.new }
130
+ before do
131
+ Live.stub!(:parse).and_return(@first_timeline)
132
+ @ary = []
133
+ subject.update { |timeline| @ary << timeline }
134
+ end
135
+ it { @ary.should == @first_timeline }
136
+ end
137
+ end
138
+ end
139
+ end
@@ -0,0 +1,156 @@
1
+ # -*- coding: utf-8 -*-
2
+ require_relative '../spec_helper'
3
+
4
+ include ConsadoleAggregator
5
+
6
+ describe ConsadoleAggregator do
7
+ describe Aggregatable do
8
+ before do
9
+ @articles = [{ url:'http://example.jp/', title:'hoge' },
10
+ { url:'http://example.com/', title:'fuga' }]
11
+ get_resource_stub = double('get_resource')
12
+ get_resource_stub.stub(:call).and_return('')
13
+ parse_list_stub = double('parse_list')
14
+ parse_list_stub.stub(:call).and_return(['http://example.jp/', 'http://example.com/'])
15
+ parse_article_stub = double('parse_article')
16
+ parse_article_stub.stub(:call) do |arg|
17
+ if arg == 'http://example.jp/'
18
+ { url:arg, title:'hoge' }
19
+ else
20
+ { url:arg, title:'fuga' }
21
+ end
22
+ end
23
+ klass = Class.new do
24
+ include Aggregatable
25
+ @get_resource = get_resource_stub
26
+ @parse_list = parse_list_stub
27
+ @parse_article = parse_article_stub
28
+ end
29
+ ConsadoleAggregator::News.const_set(:TestClass, klass) # FIXME How do I suppress warning?
30
+ subject.stub(:save_strage)
31
+ end
32
+
33
+ subject{ News::TestClass.new }
34
+
35
+ describe '#get_new_articles' do
36
+ context 'when article straged' do
37
+ before do
38
+ @straged = [@articles.first]
39
+ subject.stub!(:get_strage).and_return(@straged)
40
+ end
41
+ it 'should return part of articles' do
42
+ subject.get_new_articles.should == @articles - @straged
43
+ end
44
+ end
45
+ end
46
+
47
+ describe '#update' do
48
+ context 'when default' do
49
+ it 'should call ordered' do
50
+ subject.should_receive(:get_new_articles).ordered.and_return([])
51
+ subject.should_receive(:save_strage).ordered
52
+ subject.update
53
+ end
54
+ end
55
+ context 'when new_articles exist' do
56
+ before do
57
+ YAML.stub!(:load_file).and_return([])
58
+ subject.stub!(:get_new_articles).and_return(@articles)
59
+ end
60
+ it 'should add strage' do
61
+ expect{ subject.update }.to change{ subject.get_strage.dup }.from([]).to(@articles)
62
+ end
63
+ end
64
+ end
65
+
66
+ describe '#get_strage' do
67
+ context 'when yaml can load' do
68
+ it 'should load from yaml' do
69
+ YAML.should_receive(:load_file).with(/\/db\/TestClass.yaml$/)
70
+ subject.get_strage
71
+ end
72
+ end
73
+ context 'when yaml can\'t load' do
74
+ before do
75
+ YAML.stub!(:load_file){ raise }
76
+ end
77
+ it 'should load from yaml' do
78
+ subject.get_strage == []
79
+ end
80
+ end
81
+ end
82
+
83
+ describe '#build_strage_path' do
84
+ context 'when Testclass' do
85
+ it { subject.build_strage_path.should match /\/db\/TestClass.yaml$/ }
86
+ end
87
+ end
88
+ end
89
+
90
+ describe News do
91
+ before do
92
+ ConsadoleAggregator::News::Nikkansports.get_resource =
93
+ ->{ File.read('./spec/ext/nikkansports.txt').toutf8 }
94
+ ConsadoleAggregator::News::Hochiyomiuri.get_resource =
95
+ ->{ File.read('./spec/ext/hochiyomiuri.txt').toutf8 }
96
+ ConsadoleAggregator::News::Asahi.get_resource =
97
+ ->{ File.read('./spec/ext/asahi.txt').toutf8 }
98
+ ConsadoleAggregator::News::Forzaconsadole.get_resource =
99
+ ->{ File.read('./spec/ext/forzaconsadole.txt').toutf8 }
100
+ ConsadoleAggregator::News::Consaburn.get_resource =
101
+ ->{ File.read('./spec/ext/consaburn.txt').toutf8 }
102
+ ConsadoleAggregator::News::Consaclub.get_resource =
103
+ ->{ File.read('./spec/ext/consaclub.txt').toutf8 }
104
+ ConsadoleAggregator::News::Consadolenews.get_resource =
105
+ ->{ File.read('./spec/ext/consadolenews.txt').toutf8 }
106
+ ConsadoleAggregator::News::Consadolesponsornews.get_resource =
107
+ ->{ File.read('./spec/ext/consadolesponsornews.txt').toutf8 }
108
+ ConsadoleAggregator::News::Consadolephotos.get_resource =
109
+ ->{ File.read('./spec/ext/consadolephotos.txt').toutf8 }
110
+ ConsadoleAggregator::News::Jsgoalnews.get_resource =
111
+ ->{ File.read('./spec/ext/jsgoalnews.txt').toutf8 }
112
+ ConsadoleAggregator::News::Jsgoalphotos.get_resource =
113
+ ->{ File.read('./spec/ext/jsgoalphotos.txt').toutf8 }
114
+
115
+ module News
116
+ def self.trace(url_path, limit=nil)
117
+ url_path
118
+ end
119
+ end
120
+ end
121
+
122
+ it 'Nikkansports should not raise Exception' do
123
+ expect{ ConsadoleAggregator::News::Nikkansports.new.get_new_articles }.to_not raise_error
124
+ end
125
+ it 'Hochiyomiuri should not raise Exception' do
126
+ expect{ ConsadoleAggregator::News::Hochiyomiuri.new.get_new_articles }.to_not raise_error
127
+ end
128
+ it 'Asahi should not raise Exception' do
129
+ expect{ ConsadoleAggregator::News::Asahi.new.get_new_articles }.to_not raise_error
130
+ end
131
+ it 'Forzaconsadole should not raise Exception' do
132
+ expect{ ConsadoleAggregator::News::Forzaconsadole.new.get_new_articles }.to_not raise_error
133
+ end
134
+ it 'Consaburn should not raise Exception' do
135
+ expect{ ConsadoleAggregator::News::Consaburn.new.get_new_articles }.to_not raise_error
136
+ end
137
+ it 'Consaclub should not raise Exception' do
138
+ expect{ ConsadoleAggregator::News::Consaclub.new.get_new_articles }.to_not raise_error
139
+ end
140
+ it 'Consadolenews should not raise Exception' do
141
+ expect{ ConsadoleAggregator::News::Consadolenews.new.get_new_articles }.to_not raise_error
142
+ end
143
+ it 'Consadolesponsornews should not raise Exception' do
144
+ expect{ ConsadoleAggregator::News::Consadolesponsornews.new.get_new_articles }.to_not raise_error
145
+ end
146
+ it 'Consadolephotos should not raise Exception' do
147
+ expect{ ConsadoleAggregator::News::Consadolephotos.new.get_new_articles }.to_not raise_error
148
+ end
149
+ it 'Jsgoalnews should not raise Exception' do
150
+ expect{ ConsadoleAggregator::News::Jsgoalnews.new.get_new_articles }.to_not raise_error
151
+ end
152
+ it 'Jsgoalphotos should not raise Exception' do
153
+ expect{ ConsadoleAggregator::News::Jsgoalphotos.new.get_new_articles }.to_not raise_error
154
+ end
155
+ end
156
+ end