ruby-hackernews 1.2.1 → 1.3.0

Files changed (27)
  1. data/README.rdoc +4 -0
  2. data/Rakefile +1 -1
  3. data/lib/ruby-hackernews/domain/comment/comment.rb +44 -41
  4. data/lib/ruby-hackernews/domain/entry/comments_info.rb +16 -13
  5. data/lib/ruby-hackernews/domain/entry/entry.rb +64 -60
  6. data/lib/ruby-hackernews/domain/entry/link_info.rb +12 -9
  7. data/lib/ruby-hackernews/domain/entry/time_info.rb +18 -15
  8. data/lib/ruby-hackernews/domain/entry/user_info.rb +11 -8
  9. data/lib/ruby-hackernews/domain/entry/voting_info.rb +12 -9
  10. data/lib/ruby-hackernews/domain/user.rb +31 -27
  11. data/lib/ruby-hackernews/services/comment_service.rb +54 -51
  12. data/lib/ruby-hackernews/services/configuration_service.rb +25 -22
  13. data/lib/ruby-hackernews/services/entry_service.rb +53 -51
  14. data/lib/ruby-hackernews/services/login_service.rb +23 -20
  15. data/lib/ruby-hackernews/services/mechanize_context.rb +23 -19
  16. data/lib/ruby-hackernews/services/not_authenticated_error.rb +8 -5
  17. data/lib/ruby-hackernews/services/parsers/comments_info_parser.rb +16 -11
  18. data/lib/ruby-hackernews/services/parsers/entry_page_parser.rb +20 -17
  19. data/lib/ruby-hackernews/services/parsers/entry_parser.rb +25 -22
  20. data/lib/ruby-hackernews/services/parsers/link_info_parser.rb +14 -12
  21. data/lib/ruby-hackernews/services/parsers/time_info_parser.rb +13 -11
  22. data/lib/ruby-hackernews/services/parsers/user_info_parser.rb +15 -11
  23. data/lib/ruby-hackernews/services/parsers/voting_info_parser.rb +16 -12
  24. data/lib/ruby-hackernews/services/signup_service.rb +15 -12
  25. data/lib/ruby-hackernews/services/user_info_service.rb +21 -18
  26. data/lib/ruby-hackernews/services/voting_service.rb +10 -7
  27. metadata +24 -24
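
Each of the source diffs shown below makes the same structural change: the class (or module) is wrapped in a new RubyHackernews namespace and re-indented accordingly. A minimal sketch of what that means for calling code, assuming you reference the service classes directly and that the gem's require path matches its name:

  require "ruby-hackernews"

  # 1.2.1 and earlier: top-level constants
  # entries = EntryService.new.get_entries(2)

  # 1.3.0: the same classes, now namespaced
  entries = RubyHackernews::EntryService.new.get_entries(2)

  # or pull the namespace in wholesale
  include RubyHackernews
  entries = EntryService.new.get_entries(2)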
data/lib/ruby-hackernews/services/comment_service.rb
@@ -1,62 +1,65 @@
+module RubyHackernews
 
-class CommentService
-  include MechanizeContext
+  class CommentService
+    include MechanizeContext
 
-  def get_comments(page_url)
-    comments = []
-    last = comments
-    current_level = -1
-    page = agent.get(page_url)
-    page.search("//table")[3].search("table/tr").select do |tr|
-      tr.search("span.comment").inner_html != "[deleted]"
-    end.each do |tr|
-      comment = parse_comment(tr)
-      level = tr.search("img[@src='http://ycombinator.com/images/s.gif']").first['width'].to_i / 40
-      difference = current_level - level
-      target = last
-      (difference + 1).times do
-        target = target.parent || comments
+    def get_comments(page_url)
+      comments = []
+      last = comments
+      current_level = -1
+      page = agent.get(page_url)
+      page.search("//table")[3].search("table/tr").select do |tr|
+        tr.search("span.comment").inner_html != "[deleted]"
+      end.each do |tr|
+        comment = parse_comment(tr)
+        level = tr.search("img[@src='http://ycombinator.com/images/s.gif']").first['width'].to_i / 40
+        difference = current_level - level
+        target = last
+        (difference + 1).times do
+          target = target.parent || comments
+        end
+        target << comment
+        last = comment
+        current_level = level
       end
-      target << comment
-      last = comment
-      current_level = level
+      return comments
     end
-    return comments
-  end
 
-  def get_new_comments(pages = 1, url = ConfigurationService.comments_url)
-    parser = EntryPageParser.new(agent.get(url))
-    comments = []
-    pages.times do
-      lines = parser.get_lines
-      lines.each do |line|
-        comments << parse_comment(line)
+    def get_new_comments(pages = 1, url = ConfigurationService.comments_url)
+      parser = EntryPageParser.new(agent.get(url))
+      comments = []
+      pages.times do
+        lines = parser.get_lines
+        lines.each do |line|
+          comments << parse_comment(line)
+        end
+        next_url = parser.get_next_url || break
+        parser = EntryPageParser.new(agent.get(next_url))
       end
-      next_url = parser.get_next_url || break
-      parser = EntryPageParser.new(agent.get(next_url))
+      return comments
     end
-    return comments
-  end
 
-  def parse_comment(element)
-    text = ""
-    element.search("span.comment").first.children.each do |ch|
-      text = ch.inner_html.gsub(/<.{1,2}>/,"")
+    def parse_comment(element)
+      text = ""
+      element.search("span.comment").first.children.each do |ch|
+        text = ch.inner_html.gsub(/<.{1,2}>/,"")
+      end
+      header = element.search("span.comhead").first
+      voting = VotingInfoParser.new(element.search("td/center/a"), header).parse
+      user_info = UserInfoParser.new(header).parse
+      reply_link = element.search("td[@class='default']/p//u//a").first
+      reply_url = reply_link['href'] if reply_link
+      return Comment.new(text, voting, user_info, reply_url)
     end
-    header = element.search("span.comhead").first
-    voting = VotingInfoParser.new(element.search("td/center/a"), header).parse
-    user_info = UserInfoParser.new(header).parse
-    reply_link = element.search("td[@class='default']/p//u//a").first
-    reply_url = reply_link['href'] if reply_link
-    return Comment.new(text, voting, user_info, reply_url)
-  end
 
-  def write_comment(page_url, comment)
-    require_authentication
-    form = agent.get(page_url).forms.first
-    form.text = comment
-    form.submit
-    return true
-  end
+    def write_comment(page_url, comment)
+      require_authentication
+      form = agent.get(page_url).forms.first
+      form.text = comment
+      form.submit
+      return true
+    end
 
-end
+  end
+
+end
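
For reference, a short usage sketch of the namespaced service; the item URL is a placeholder and the methods are exactly those defined above:

  require "ruby-hackernews"

  service = RubyHackernews::CommentService.new

  # comment tree of a single item page (placeholder id)
  comments = service.get_comments("http://news.ycombinator.com/item?id=1")

  # first two pages of /newcomments
  latest = service.get_new_comments(2)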
data/lib/ruby-hackernews/services/configuration_service.rb
@@ -1,32 +1,35 @@
+module RubyHackernews
 
-class ConfigurationService
+  class ConfigurationService
 
-  def self.base_url=(url)
-    @base_url = url
-  end
+    def self.base_url=(url)
+      @base_url = url
+    end
 
-  def self.base_url
-    return @base_url || "http://news.ycombinator.com/"
-  end
+    def self.base_url
+      return @base_url || "http://news.ycombinator.com/"
+    end
 
-  def self.new_url
-    return File.join(self.base_url, "newest")
-  end
+    def self.new_url
+      return File.join(self.base_url, "newest")
+    end
 
-  def self.ask_url
-    return File.join(self.base_url, "ask")
-  end
+    def self.ask_url
+      return File.join(self.base_url, "ask")
+    end
 
-  def self.jobs_url
-    return File.join(self.base_url, "jobs")
-  end
+    def self.jobs_url
+      return File.join(self.base_url, "jobs")
+    end
 
-  def self.comments_url
-    return File.join(self.base_url, "newcomments")
-  end
+    def self.comments_url
+      return File.join(self.base_url, "newcomments")
+    end
+
+    def self.submit_url
+      return File.join(self.base_url, "submit")
+    end
 
-  def self.submit_url
-    return File.join(self.base_url, "submit")
   end
 
-end
+end
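
Since every other URL is derived from base_url with File.join, pointing the gem at another host is a single assignment. A sketch using only the class methods above (the host is a placeholder):

  require "ruby-hackernews"

  RubyHackernews::ConfigurationService.base_url = "http://hn.example.test/"

  RubyHackernews::ConfigurationService.new_url     # => "http://hn.example.test/newest"
  RubyHackernews::ConfigurationService.submit_url  # => "http://hn.example.test/submit"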
data/lib/ruby-hackernews/services/entry_service.rb
@@ -1,65 +1,67 @@
+module RubyHackernews
 
-class EntryService
-  include MechanizeContext
+  class EntryService
+    include MechanizeContext
 
-  def get_entries(pages = 1, url = ConfigurationService.base_url)
-    parser = EntryPageParser.new(agent.get(url))
-    entry_infos = []
-    pages.times do
-      lines = parser.get_lines
-      (lines.length / 2).times do
-        entry_infos << EntryParser.new(lines.shift, lines.shift).parse
+    def get_entries(pages = 1, url = ConfigurationService.base_url)
+      parser = EntryPageParser.new(agent.get(url))
+      entry_infos = []
+      pages.times do
+        lines = parser.get_lines
+        (lines.length / 2).times do
+          entry_infos << EntryParser.new(lines.shift, lines.shift).parse
+        end
+        next_url = parser.get_next_url || break
+        parser = EntryPageParser.new(agent.get(next_url))
       end
-      next_url = parser.get_next_url || break
-      parser = EntryPageParser.new(agent.get(next_url))
+      return entry_infos
     end
-    return entry_infos
-  end
 
-  def find_by_id(id)
-    page = agent.get(ConfigurationService.base_url + "item?id=#{id}")
-    lines = page.search("table")[2].search("tr")
-    return EntryParser.new(lines[0], lines[1]).parse
-  end
+    def find_by_id(id)
+      page = agent.get(ConfigurationService.base_url + "item?id=#{id}")
+      lines = page.search("table")[2].search("tr")
+      return EntryParser.new(lines[0], lines[1]).parse
+    end
 
-  def get_new_entries(pages = 1)
-    return get_entries(pages, ConfigurationService.new_url)
-  end
+    def get_new_entries(pages = 1)
+      return get_entries(pages, ConfigurationService.new_url)
+    end
 
-  def get_questions(pages = 1)
-    return get_entries(pages, ConfigurationService.ask_url)
-  end
+    def get_questions(pages = 1)
+      return get_entries(pages, ConfigurationService.ask_url)
+    end
 
-  def get_jobs(pages = 1)
-    return get_entries(pages, ConfigurationService.jobs_url)
-  end
-
-  def submit(title, url)
-    require_authentication
-    form = agent.get(ConfigurationService.submit_url).forms.first
-    submit_link(form, title, url)
-    return true
-  end
+    def get_jobs(pages = 1)
+      return get_entries(pages, ConfigurationService.jobs_url)
+    end
+
+    def submit(title, url)
+      require_authentication
+      form = agent.get(ConfigurationService.submit_url).forms.first
+      submit_link(form, title, url)
+      return true
+    end
 
-  def ask(title, text)
-    require_authentication
-    form = agent.get(ConfigurationService.submit_url).forms.first
-    submit_question(form, title, text)
-    return true
-  end
+    def ask(title, text)
+      require_authentication
+      form = agent.get(ConfigurationService.submit_url).forms.first
+      submit_question(form, title, text)
+      return true
+    end
 
-  private
-  def submit_link(form, title, url)
-    form.t = title
-    form.u = url
-    form.submit
-  end
+    private
+    def submit_link(form, title, url)
+      form.t = title
+      form.u = url
+      form.submit
+    end
 
-  def submit_question(form, title, text)
-    form.t = title
-    form.x = text
-    form.submit
+    def submit_question(form, title, text)
+      form.t = title
+      form.x = text
+      form.submit
+    end
+
   end
 
-
 end
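
A usage sketch for the submission helpers, which call require_authentication first; credentials, title and URL are placeholders, and the login step uses the LoginService shown in the next diff:

  require "ruby-hackernews"

  RubyHackernews::LoginService.new.login("user", "secret")

  entries = RubyHackernews::EntryService.new
  entries.submit("An interesting link", "http://example.com/article")  # fills form fields t and u
  entries.ask("Ask HN: a question", "Body text of the question")       # fills form fields t and x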
data/lib/ruby-hackernews/services/login_service.rb
@@ -1,25 +1,28 @@
+module RubyHackernews
 
-class LoginService
-  include MechanizeContext
+  class LoginService
+    include MechanizeContext
 
-  def login(username, password)
-    page = agent.get(ConfigurationService.base_url)
-    login_url = page.search(".pagetop/a").last['href'].sub("/","")
-    login_page = agent.get(ConfigurationService.base_url + login_url)
-    form = login_page.forms.first
-    form.u = username
-    form.p = password
-    page = form.submit
-    return page.title != nil
-  end
+    def login(username, password)
+      page = agent.get(ConfigurationService.base_url)
+      login_url = page.search(".pagetop/a").last['href'].sub("/","")
+      login_page = agent.get(ConfigurationService.base_url + login_url)
+      form = login_page.forms.first
+      form.u = username
+      form.p = password
+      page = form.submit
+      return page.title != nil
+    end
+
+    def logout
+      require_authentication
+      page = agent.get(ConfigurationService.base_url)
+      login_url = page.search(".pagetop/a").last['href'].sub("/","")
+      logout_page = agent.get(ConfigurationService.base_url + login_url)
+      agent.cookie_jar.jar.clear
+      return logout_page.search(".pagetop/a").last.inner_html == "login"
+    end
 
-  def logout
-    require_authentication
-    page = agent.get(ConfigurationService.base_url)
-    login_url = page.search(".pagetop/a").last['href'].sub("/","")
-    logout_page = agent.get(ConfigurationService.base_url + login_url)
-    agent.cookie_jar.jar.clear
-    return logout_page.search(".pagetop/a").last.inner_html == "login"
   end
 
-end
+end
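
Both login and logout return a boolean derived from the resulting page, so a session round-trip can be sketched as (placeholder credentials):

  require "ruby-hackernews"

  session = RubyHackernews::LoginService.new
  if session.login("user", "secret")
    # ... authenticated work ...
    session.logout
  end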
data/lib/ruby-hackernews/services/mechanize_context.rb
@@ -1,27 +1,31 @@
-module MechanizeContext
+module RubyHackernews
 
-  @@contexts = {}
+  module MechanizeContext
 
-  def self.agent=(key)
-    @@default = key
-  end
+    @@contexts = {}
 
-  def agent
-    @@default ||= :default
-    @@contexts[@@default] = Mechanize.new unless @@contexts[@@default]
-    return @@contexts[@@default]
-  end
+    def self.agent=(key)
+      @@default = key
+    end
 
-  def [](key)
-    return @@contexts[key]
-  end
+    def agent
+      @@default ||= :default
+      @@contexts[@@default] = Mechanize.new unless @@contexts[@@default]
+      return @@contexts[@@default]
+    end
 
-  def require_authentication
-    raise NotAuthenticatedError unless authenticated?
-  end
+    def [](key)
+      return @@contexts[key]
+    end
+
+    def require_authentication
+      raise NotAuthenticatedError unless authenticated?
+    end
+
+    def authenticated?(key = :default)
+      return @@contexts[key] && @@contexts[key].cookie_jar.jar.any?
+    end
 
-  def authenticated?(key = :default)
-    return @@contexts[key] && @@contexts[key].cookie_jar.jar.any?
   end
 
-end
+end
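
Because agents are stored in the @@contexts class variable under a shared default key, every service that includes MechanizeContext reuses the same Mechanize session and cookie jar. A sketch of the consequence, with a placeholder item URL:

  require "ruby-hackernews"

  RubyHackernews::LoginService.new.login("user", "secret")

  # A separate service instance sees the same cookies, so
  # require_authentication passes without logging in again.
  comments = RubyHackernews::CommentService.new
  comments.write_comment("http://news.ycombinator.com/item?id=1", "Nice post") if comments.authenticated?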
data/lib/ruby-hackernews/services/not_authenticated_error.rb
@@ -1,8 +1,11 @@
+module RubyHackernews
 
-class NotAuthenticatedError < StandardError
+  class NotAuthenticatedError < StandardError
 
-  def message
-    return "You need to authenticate before making this operation"
+    def message
+      return "You need to authenticate before making this operation"
+    end
+
   end
-
-end
+
+end
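
Methods guarded by require_authentication raise this error when no session cookies are present, so callers can rescue it explicitly:

  require "ruby-hackernews"

  begin
    RubyHackernews::EntryService.new.submit("Title", "http://example.com")
  rescue RubyHackernews::NotAuthenticatedError => e
    puts e.message  # "You need to authenticate before making this operation"
  end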
data/lib/ruby-hackernews/services/parsers/comments_info_parser.rb
@@ -1,16 +1,21 @@
-class CommentsInfoParser
+module RubyHackernews
 
-  def initialize(comments_element)
-    @element = comments_element.search("a")[1]
-  end
+  class CommentsInfoParser
+
+    def initialize(comments_element)
+      @element = comments_element.search("a")[1]
+    end
 
-  def parse
-    comments_info = nil
-    if @element && @element['href'] =~ /id/
-      comments = @element.inner_html.split[0].to_i
-      comments_page = @element['href']
-      comments_info = CommentsInfo.new(comments, comments_page)
+    def parse
+      comments_info = nil
+      if @element && @element['href'] =~ /id/
+        comments = @element.inner_html.split[0].to_i
+        comments_page = @element['href']
+        comments_info = CommentsInfo.new(comments, comments_page)
+      end
+      return comments_info
     end
-    return comments_info
+
   end
+
 end
data/lib/ruby-hackernews/services/parsers/entry_page_parser.rb
@@ -1,24 +1,27 @@
+module RubyHackernews
 
-class EntryPageParser
+  class EntryPageParser
 
-  def initialize(page)
-    @page = page
-  end
+    def initialize(page)
+      @page = page
+    end
 
-  def get_lines
-    lines = @page.search("//table")[2].search("tr").select do |tr|
-      tr['style'] !~ /height/ &&
-      tr.children.first.attributes.count != 0
+    def get_lines
+      lines = @page.search("//table")[2].search("tr").select do |tr|
+        tr['style'] !~ /height/ &&
+        tr.children.first.attributes.count != 0
+      end
+      more_link = lines.last.search("a").first
+      lines.pop if more_link && more_link.inner_html == "More"
+      return lines
+    end
+
+    def get_next_url
+      more_link = @page.search("//table")[2].search("tr/td/a").select { |node| node.inner_html == "More"}.first
+      return more_link['href'] if more_link
+      return nil
    end
-    more_link = lines.last.search("a").first
-    lines.pop if more_link && more_link.inner_html == "More"
-    return lines
-  end
 
-  def get_next_url
-    more_link = @page.search("//table")[2].search("tr/td/a").select { |node| node.inner_html == "More"}.first
-    return more_link['href'] if more_link
-    return nil
   end
 
-end
+end
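
EntryPageParser is the pagination helper behind EntryService and CommentService: get_lines returns the data rows of the current listing page and get_next_url returns the href of the "More" link, or nil on the last page. A sketch of walking a few pages by hand with the same two calls (the Mechanize agent is created directly here instead of through MechanizeContext):

  require "mechanize"
  require "ruby-hackernews"

  agent = Mechanize.new
  url = "http://news.ycombinator.com/"

  3.times do
    parser = RubyHackernews::EntryPageParser.new(agent.get(url))
    puts "#{parser.get_lines.size} rows from #{url}"
    url = parser.get_next_url || break  # relative "More" href, resolved by Mechanize
  end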