share_counts 0.0.9 → 0.1.0

@@ -0,0 +1,4 @@
+ require "autotest/fsevent"
+ require "autotest/restart"
+ require "redgreen/autotest"
+
@@ -1,7 +1,8 @@
  PATH
  remote: .
  specs:
- share_counts (0.0.6)
+ share_counts (0.0.9)
+ SystemTimer
  json
  nokogiri
  redis
@@ -10,19 +11,24 @@ PATH
  GEM
  remote: http://rubygems.org/
  specs:
- ansi (1.2.2)
+ SystemTimer (1.2.3)
+ activesupport (3.0.5)
+ addressable (2.2.4)
+ crack (0.1.8)
  json (1.5.1)
  mime-types (1.16)
- minitest (2.0.2)
  nokogiri (1.4.4)
- redis (2.1.1)
+ redis (2.2.0)
  rest-client (1.6.1)
  mime-types (>= 1.16)
+ webmock (1.6.2)
+ addressable (>= 2.2.2)
+ crack (>= 0.1.7)

  PLATFORMS
  ruby

  DEPENDENCIES
- ansi
- minitest
+ activesupport
  share_counts!
+ webmock
@@ -0,0 +1,2 @@
+ $LOAD_PATH.unshift File.expand_path('../', File.dirname(__FILE__))
+ Autotest.add_discovery { "rules" }
@@ -0,0 +1,12 @@
+ require 'autotest'
+
+ class Autotest::Rules < Autotest
+ def initialize
+ super
+
+ add_mapping(%r{^lib/.*\.rb$}, true) { |filename, _|
+ files_matching %r%^test/test_.*\.rb$%
+ }
+ end
+ end
+
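The two files above wire up a custom autotest discovery ("rules") that re-runs the whole test suite whenever anything under lib/ changes. A quick sanity check of the two patterns used in the mapping, in plain Ruby with hypothetical file names (no autotest required):

  %r{^lib/.*\.rb$} =~ "lib/share_counts/common.rb"        # => 0 (matches)
  %r%^test/test_.*\.rb$% =~ "test/test_share_counts.rb"   # => 0 (matches)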
@@ -1,5 +1,5 @@
- %w( rest_client json nokogiri redis timeout ).each{ |lib| require lib }
- %w( caching common reddit ).each{ |file| load File.expand_path( File.join( File.dirname( __FILE__ ), "share_counts", "#{file}.rb" ) ) } # TODO: replace load with require
+ %w( rubygems rest_client json nokogiri redis ).each{ |lib| require lib }
+ %w( array caching common reddit ).each{ |file| load File.expand_path( File.join( File.dirname( __FILE__ ), "share_counts", "#{file}.rb" ) ) } # TODO: replace load with require

  module ShareCounts

@@ -11,72 +11,57 @@ module ShareCounts
  end

  def self.supported_networks
- %w(reddit digg twitter facebook fblike linkedin googlebuzz stumbleupon)
+ %w(reddit digg twitter facebook linkedin stumbleupon googlebuzz)
  end
-
- def self.reddit url
- try("reddit", url) {
+
+ def self.reddit url, raise_exceptions = false
+ try("reddit", url, raise_exceptions) {
  extract_count from_json( "http://www.reddit.com/api/info.json", :url => url ),
  :selector => "data/children/data/score"
  }
  end
-
- def self.reddit_with_permalink url
- ShareCounts::Reddit.info_for url
+
+ def self.reddit_with_permalink url, raise_exceptions = false
+ ShareCounts::Reddit.info_for url, raise_exceptions
  end
-
- def self.digg url
- try("digg", url) {
+
+ def self.digg url, raise_exceptions = false
+ try("digg", url, raise_exceptions) {
  extract_count from_json( "http://services.digg.com/2.0/story.getInfo", :links => url ),
  :selector => "stories/diggs"
  }
  end

- def self.twitter url
- try("twitter", url) {
+ def self.twitter url, raise_exceptions = false
+ try("twitter", url, raise_exceptions) {
  extract_count from_json( "http://urls.api.twitter.com/1/urls/count.json", :url => url),
  :selector => "count"
  }
  end

- def self.facebook url
- try("facebook", url) {
- extract_count from_json("http://api.facebook.com/restserver.php", :v => "1.0", :method => "links.getStats",
- :urls => url, :callback => "fb_sharepro_render", :format => "json" ), :selector => "share_count"
- }
- end
-
- def self.fblike url
- try("fblike", url) {
+ def self.facebook url, raise_exceptions = false
+ try("facebook", url, raise_exceptions) {
  extract_count from_json("http://api.facebook.com/restserver.php", :v => "1.0", :method => "links.getStats",
- :urls => url, :callback => "fb_sharepro_render", :format => "json" ), :selector => "like_count"
- }
- end
-
- def self.fball url
- try("fball", url) {
- json = from_json("http://api.facebook.com/restserver.php", :v => "1.0",
- :method => "links.getStats", :urls => url, :callback => "fb_sharepro_render", :format => "json"
- ).first.select{ |k,v| ["share_count", "like_count"].include? k }
+ :urls => url, :callback => "fb_sharepro_render", :format => "json" ), :selector => "total_count"
  }
  end

- def self.linkedin url
- try("linkedin", url) {
+ def self.linkedin url, raise_exceptions = false
+ try("linkedin", url, raise_exceptions) {
  extract_count from_json("http://www.linkedin.com/cws/share-count",
  :url => url, :callback => "IN.Tags.Share.handleCount" ), :selector => "count"
  }
  end

- def self.googlebuzz url
- try("googlebuzz", url) {
+ def self.googlebuzz url, raise_exceptions = false
+ try("googlebuzz", url, raise_exceptions) {
  from_json("http://www.google.com/buzz/api/buzzThis/buzzCounter",
  :url => url, :callback => "google_buzz_set_count" )[url]
  }
  end

- def self.stumbleupon url
- try("stumbleupon", url) {
+ def self.stumbleupon url, raise_exceptions = false
+ try("stumbleupon", url, raise_exceptions) {
  Nokogiri::HTML.parse(
  make_request("http://www.stumbleupon.com/badge/embed/5/", :url => url )
  ).xpath( "//body/div/ul/li[2]/a/span").text.to_i
@@ -86,11 +71,11 @@ module ShareCounts
  def self.all url
  supported_networks.inject({}) { |r, c| r[c.to_sym] = ShareCounts.send(c, url); r }
  end
-
+
  def self.selected url, selections
  selections.map{|name| name.downcase}.select{|name| supported_networks.include? name.to_s}.inject({}) {
  |r, c| r[c.to_sym] = ShareCounts.send(c, url); r }
  end
-
+
  end

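The main API change in this file: every network method (and reddit_with_permalink) now accepts an optional raise_exceptions flag, false by default so the old swallow-and-log behaviour is preserved, while the fblike/fball Facebook variants are gone and facebook now reads the combined total_count. A usage sketch, assuming the gem is installed and the URL below is reachable:

  require "share_counts"

  url = "http://vitobotta.com/cv-resume/"
  ShareCounts.reddit(url)                           # logs a warning and returns nil if the request fails
  ShareCounts.reddit(url, true)                     # re-raises the underlying exception instead
  ShareCounts.selected(url, %w(twitter linkedin))   # => { :twitter => ..., :linkedin => ... }
  ShareCounts.all(url)                              # one entry per supported network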
@@ -0,0 +1,5 @@
+ class Array
+ def to_hash
+ @hash ||= self.inject({}){|r, c| r.merge!(c); r }
+ end
+ end
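This small core extension merges an array of single-pair hashes into one hash (memoised per array instance); Reddit.info_for below and the test helpers both lean on it. Roughly:

  [{ "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/" }, { "score" => 31 }].to_hash
  # => { "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/", "score" => 31 }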
@@ -17,7 +17,7 @@ module ShareCounts
  # NOTE: caching will be skipped if the block fails.
  #
  #
- def try service, url, &block
+ def try service, url, raise_exceptions = false, &block
  cache_key = "ShareCounts||#{service}||#{url}"
  if cache_enabled?
  if result = from_redis(cache_key)
@@ -33,6 +33,7 @@ module ShareCounts
  end
  rescue Exception => e
  puts "Something went wrong with #{service}: #{e}"
+ raise e if raise_exceptions
  end


@@ -49,25 +50,26 @@ module ShareCounts
  def make_request *args
  result = nil
  attempts = 1
+ url = args.shift
+ params = args.inject({}) { |r, c| r.merge! c }

  begin
- timeout(3) do
- url = args.shift
- params = args.inject({}) { |r, c| r.merge! c }
- response = RestClient.get url, { :params => params }
+ response = RestClient.get url, { :params => params, :timeout => 5 }

-
- # if a callback is specified, the expected response is in the format "callback_name(JSON data)";
- # with the response ending with ";" and, in some cases, "\n"
- result = params.keys.include?(:callback) \
- ? response.gsub(/^(.*);+\n*$/, "\\1").gsub(/^#{params[:callback]}\((.*)\)$/, "\\1") \
- : response
- end
+ # if a callback is specified, the expected response is in the format "callback_name(JSON data)";
+ # with the response ending with ";" and, in some cases, "\n"
+ result = params.keys.include?(:callback) \
+ ? response.gsub(/^(.*);+\n*$/, "\\1").gsub(/^#{params[:callback]}\((.*)\)$/, "\\1") \
+ : response

  rescue Exception => e
- puts "Failed #{attempts} attempt(s)"
+ puts "Failed #{attempts} attempt(s) - #{e}"
  attempts += 1
- retry if attempts <= 3
+ if attempts <= 3
+ retry
+ else
+ raise Exception
+ end
  end

  result
@@ -82,7 +84,7 @@ module ShareCounts
  #
  #
  def from_json *args
- JSON.parse make_request *args
+ JSON.parse(make_request(*args))
  end

  #
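Summing up the changes to the request helpers: try gains the raise_exceptions flag and re-raises after logging, and make_request no longer wraps the call in timeout(3); it passes :timeout => 5 to RestClient instead, retries up to three times, and raises after the third failure rather than returning nil. The JSON-P unwrapping is unchanged; as a standalone sketch of what those two gsub calls do (hypothetical callback name and payload):

  callback = "myCallback"
  response = "myCallback({\"count\":35});\n"
  response.gsub(/^(.*);+\n*$/, "\\1").gsub(/^#{callback}\((.*)\)$/, "\\1")
  # => "{\"count\":35}"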
@@ -3,21 +3,15 @@ module ShareCounts
  extend Common
  extend Caching

- def self.info_for url
- try("reddit-details", url) {
+ def self.info_for url, raise_exceptions = false
+ try("reddit-details", url, raise_exceptions) {
  data = extract_info from_json( "http://www.reddit.com/api/info.json", :url => url ), :selector => "data/children/data"
-
- data.reject{ |key, value|
- %w( media_embed levenshtein selftext_html selftext likes saved clicked media over_18
- hidden thumbnail subreddit_id is_self created subreddit_id created_utc num_comments
- domain subreddit id author downs name url title ups
- ).include? key
- }
+ data.select{|k, v| ["permalink", "score"].include? k }.map{|x| { x[0] => x[1] } }.to_hash
  }
  end

- def self.by_domain domain
- try("reddit-domain", domain) {
+ def self.by_domain domain, raise_exceptions = false
+ try("reddit-domain", domain, raise_exceptions) {
  urls = extract_info from_json("http://www.reddit.com/domain/#{domain}.json"), :selector => "data/children", :preserve_arrays => true
  urls.inject({}) do |result, url_all_info|
  url_data = extract_info(url_all_info, :selector => "data").reject{ |key, value| !["permalink", "score", "url"].include? key }
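Reddit.info_for now whitelists just the two keys it needs (permalink and score) and merges them with the new Array#to_hash, instead of rejecting a long blacklist of Reddit fields. With the reddit-url-info.json fixture further down stubbed in, as the tests do, the call returns:

  ShareCounts::Reddit.info_for("http://vitobotta.com/cv-resume/")
  # => { "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/", "score" => 30 }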
@@ -4,7 +4,7 @@ $:.push File.expand_path("../lib/share_counts", __FILE__)

  Gem::Specification.new do |s|
  s.name = "share_counts"
- s.version = "0.0.9"
+ s.version = "0.1.0"
  s.platform = Gem::Platform::RUBY
  s.authors = ["Vito Botta"]
  s.email = ["vito@botta.name"]
@@ -16,9 +16,13 @@ Gem::Specification.new do |s|
  s.add_dependency "json"
  s.add_dependency "nokogiri"
  s.add_dependency "redis"
+ s.add_dependency "SystemTimer"

- s.add_development_dependency "minitest"
- s.add_development_dependency "ansi"
+ s.add_development_dependency "webmock"
+ s.add_development_dependency "activesupport"
+ s.add_development_dependency "autotest-growl"
+ s.add_development_dependency "autotest-fsevent"
+ s.add_development_dependency "redgreen"

  s.rubyforge_project = "share_counts"

@@ -0,0 +1,59 @@
+ {
+ "count": 1,
+ "timestamp": 1301842122,
+ "cursor": "",
+ "version": "2.0",
+ "stories": [
+ {
+ "status": "upcoming",
+ "permalink": "http://digg.com/news/technology/geeky_cv_resume_d",
+ "description": "It's a not a new idea, I've seen something similar some time ago -I think it was in Perl but am not sure, but I remembered it today while updating my CV, so I thought I'd have a short, geekier version too :D",
+ "title": "Geeky CV / Resume :D",
+ "url": "http://vitobotta.com/cv-resume/",
+ "story_id": "20110205002738:141560b8-f74a-4388-9c95-d8638ca4a003",
+ "diggs": 1,
+ "submiter": {
+ "username": "vitobotta",
+ "about": "I am a passionate developer with 10+ years commercial experience and 360\u00b0 skills in the design, development and maintenance of modern, user centred as well as enterprise-scale web applications on both *nix and Microsoft platforms, with a strong interest in performance, scalability, security and search engine optimisation. \n\nBesides work, I live in London with my wife and our little baby daughter. I love boxing, martial arts, and good food!",
+ "user_id": "4341699",
+ "name": "Vito Botta",
+ "icons": [
+ "http://cdn1.diggstatic.com/user/4341699/c.4057865463.png",
+ "http://cdn1.diggstatic.com/user/4341699/h.4057865463.png",
+ "http://cdn2.diggstatic.com/user/4341699/m.4057865463.png",
+ "http://cdn1.diggstatic.com/user/4341699/l.4057865463.png",
+ "http://cdn2.diggstatic.com/user/4341699/p.4057865463.png",
+ "http://cdn1.diggstatic.com/user/4341699/s.4057865463.png",
+ "http://cdn1.diggstatic.com/user/4341699/r.4057865463.png"
+ ],
+ "gender": "m",
+ "diggs": 34,
+ "comments": 31,
+ "followers": 11,
+ "location": "London",
+ "following": 8,
+ "submissions": 32,
+ "icon": "http://cdn1.diggstatic.com/user/4341699/p.4057865463.png"
+ },
+ "comments": 0,
+ "dugg": 0,
+ "topic": {
+ "clean_name": "technology",
+ "name": "Technology"
+ },
+ "promote_date": null,
+ "activity": [],
+ "date_created": 1296865658,
+ "thumbnails": {
+ "large": "http://cdn1.diggstatic.com/story/geeky_cv_resume_d/l.png",
+ "small": "http://cdn2.diggstatic.com/story/geeky_cv_resume_d/s.png",
+ "medium": "http://cdn1.diggstatic.com/story/geeky_cv_resume_d/m.png",
+ "thumb": "http://cdn1.diggstatic.com/story/geeky_cv_resume_d/t.png"
+ }
+ }
+ ],
+ "authorized": 0,
+ "data": "stories",
+ "method": "story.getInfo",
+ "user": null
+ }
@@ -0,0 +1 @@
+ fb_sharepro_render([{"url":"http:\/\/vitobotta.com\/cv-resume\/","normalized_url":"http:\/\/www.vitobotta.com\/cv-resume\/","share_count":5,"like_count":17,"comment_count":1,"total_count":23,"click_count":0,"comments_fbid":10150098759753676,"commentsbox_count":0}]);
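The fixture above is what Facebook's links.getStats endpoint returns: a fb_sharepro_render(...) JSON-P wrapper around an array with one stats object. Once make_request strips the wrapper, extract_count with the new "total_count" selector yields 23 (5 shares + 17 likes + 1 comment), the value the tests assert. A rough standalone equivalent, run against the saved fixture:

  require "json"
  raw  = File.read("test/facebook.json")
  body = raw.gsub(/^(.*);+\n*$/, "\\1").gsub(/^fb_sharepro_render\((.*)\)$/, "\\1")
  JSON.parse(body).first["total_count"]   # => 23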
@@ -0,0 +1 @@
+ google_buzz_set_count({"http://vitobotta.com/cv-resume/":1});
@@ -0,0 +1 @@
+ IN.Tags.Share.handleCount({"count":23,"url":"http://vitobotta.com/cv-resume/"});
@@ -0,0 +1 @@
+ {"kind": "Listing", "data": {"modhash": "", "children": [{"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "Wordpress", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "g51dn", "clicked": false, "author": "VitoBotta", "media": null, "score": 8, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qhjq", "downs": 3, "is_self": false, "permalink": "/r/Wordpress/comments/g51dn/protect_your_wordpress_blogs_administration_from/", "name": "t3_g51dn", "created": 1300268647.0, "url": "http://vitobotta.com/protect-wordpress-blogs-administration-prying-eyes/", "title": "Protect your Wordpress blog's administration from prying eyes", "created_utc": 1300268647.0, "num_comments": 5, "ups": 11}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "geek", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fuepg", "clicked": false, "author": "VitoBotta", "media": null, "score": 4, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh17", "downs": 3, "is_self": false, "permalink": "/r/geek/comments/fuepg/why_isnt_ssl_turned_on_by_default_for_all_websites/", "name": "t3_fuepg", "created": 1298913557.0, "url": "http://vitobotta.com/why-isnt-ssl-on-by-default-for-all-websites/", "title": "Why isn't SSL turned on by default for all websites?", "created_utc": 1298913557.0, "num_comments": 7, "ups": 7}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "apple", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fmkxz", "clicked": false, "author": "VitoBotta", "media": null, "score": 22, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh1f", "downs": 6, "is_self": false, "permalink": "/r/apple/comments/fmkxz/faster_internet_browsing_with_alternative_dns/", "name": "t3_fmkxz", "created": 1297876909.0, "url": "http://vitobotta.com/faster-internet-browsing-alternative-dns-servers-fast-local-cache-bind/", "title": "Faster Internet browsing with alternative DNS servers and a local cache on Mac OS X", "created_utc": 1297876909.0, "num_comments": 11, "ups": 28}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "web_design", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fhw72", "clicked": false, "author": "VitoBotta", "media": null, "score": 3, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh1m", "downs": 3, "is_self": false, "permalink": "/r/web_design/comments/fhw72/a_uptodate_look_at_the_state_of_web_typography/", "name": "t3_fhw72", "created": 1297220858.0, "url": "http://vitobotta.com/web-typography-techniques-usability-performance-seo-security/", "title": "A up-to-date look at the state of web typography with considerations for usability, performance, SEO and security", "created_utc": 1297220858.0, "num_comments": 0, "ups": 6}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "ffik5", "clicked": false, "author": "VitoBotta", "media": null, "score": 29, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 9, "is_self": false, "permalink": "/r/ruby/comments/ffik5/geeky_cv_d/", "name": "t3_ffik5", "created": 1296864258.0, "url": 
"http://vitobotta.com/cv-resume/", "title": "Geeky CV :D", "created_utc": 1296864258.0, "num_comments": 24, "ups": 38}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fceyq", "clicked": false, "author": "VitoBotta", "media": null, "score": 4, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 1, "is_self": false, "permalink": "/r/ruby/comments/fceyq/share_counts_ruby_gem_the_easiest_way_to_check/", "name": "t3_fceyq", "created": 1296487510.0, "url": "http://vitobotta.com/share-counts-gem-social-networks/", "title": "share_counts Ruby gem: The easiest way to check how many times a URL has been shared on social networks!", "created_utc": 1296487510.0, "num_comments": 0, "ups": 5}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fb585", "clicked": false, "author": "VitoBotta", "media": null, "score": 2, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 2, "is_self": false, "permalink": "/r/ruby/comments/fb585/a_serialisable_and_validatable_tableless_model_to/", "name": "t3_fb585", "created": 1296271706.0, "url": "http://vitobotta.com/serialisable-validatable-tableless-model/", "title": "A serialisable and validatable tableless model to get rid of a few tables and speed things up", "created_utc": 1296271706.0, "num_comments": 1, "ups": 4}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "Database", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "faaji", "clicked": false, "author": "VitoBotta", "media": null, "score": 5, "over_18": false, "hidden": false, "thumbnail": "http://thumbs.reddit.com/t3_faaji.png", "subreddit_id": "t5_2qian", "downs": 2, "is_self": false, "permalink": "/r/Database/comments/faaji/smarter_faster_backups_and_restores_of_mysql/", "name": "t3_faaji", "created": 1296163917.0, "url": "http://vitobotta.com/smarter-faster-backups-restores-mysql-databases-with-mysqldump/", "title": "Smarter, faster backups and restores of MySQL databases using mysqldump - and other useful tips", "created_utc": 1296163917.0, "num_comments": 4, "ups": 7}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fa2k4", "clicked": false, "author": "trustfundbaby", "media": null, "score": 7, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 1, "is_self": false, "permalink": "/r/ruby/comments/fa2k4/why_you_should_think_twice_before_using_awesome/", "name": "t3_fa2k4", "created": 1296142177.0, "url": "http://vitobotta.com/awesomeprint-similar-production/", "title": "Why you should think twice before using awesome_print (and similar gems) in production", "created_utc": 1296142177.0, "num_comments": 0, "ups": 8}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "webdev", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "g8yy9", "clicked": false, "author": "VitoBotta", "media": null, "score": 0, "over_18": false, "hidden": false, "thumbnail": "http://thumbs.reddit.com/t3_g8yy9.png", "subreddit_id": "t5_2qs0q", 
"downs": 2, "is_self": false, "permalink": "/r/webdev/comments/g8yy9/migrating_from_wordpress_to_jekyll_part_1_why_i/", "name": "t3_g8yy9", "created": 1300806073.0, "url": "http://vitobotta.com/migrating-from-wordpress-to-jekyll-part-one-why-I-gave-up-on-wordpress/", "title": "Migrating from Wordpress to Jekyll - Part 1: Why I gave up on Wordpress", "created_utc": 1300806073.0, "num_comments": 3, "ups": 2}}, {"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "Database", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "fc3q5", "clicked": false, "author": "VitoBotta", "media": null, "score": 0, "over_18": false, "hidden": false, "thumbnail": "http://thumbs.reddit.com/t3_fc3q5.png", "subreddit_id": "t5_2qian", "downs": 2, "is_self": false, "permalink": "/r/Database/comments/fc3q5/painless_ultra_fast_hot_backups_and_restores_of/", "name": "t3_fc3q5", "created": 1296438817.0, "url": "http://vitobotta.com/painless-hot-backups-mysql-live-databases-percona-xtrabackup/", "title": "Painless, ultra fast hot backups and restores of MySQL databases with Percona\u2019s XtraBackup", "created_utc": 1296438817.0, "num_comments": 2, "ups": 2}}], "after": null, "before": null}}
@@ -0,0 +1 @@
+ {"kind": "Listing", "data": {"modhash": "", "children": [{"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "ffik5", "clicked": false, "author": "VitoBotta", "media": null, "score": 30, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 9, "is_self": false, "permalink": "/r/ruby/comments/ffik5/geeky_cv_d/", "name": "t3_ffik5", "created": 1296864258.0, "url": "http://vitobotta.com/cv-resume/", "title": "Geeky CV :D", "created_utc": 1296864258.0, "num_comments": 24, "ups": 39}}], "after": null, "before": null}}
@@ -0,0 +1 @@
+ {"kind": "Listing", "data": {"modhash": "", "children": [{"kind": "t3", "data": {"domain": "vitobotta.com", "media_embed": {}, "levenshtein": null, "subreddit": "ruby", "selftext_html": null, "selftext": "", "likes": null, "saved": false, "id": "ffik5", "clicked": false, "author": "VitoBotta", "media": null, "score": 31, "over_18": false, "hidden": false, "thumbnail": "", "subreddit_id": "t5_2qh21", "downs": 10, "is_self": false, "permalink": "/r/ruby/comments/ffik5/geeky_cv_d/", "name": "t3_ffik5", "created": 1296864258.0, "url": "http://vitobotta.com/cv-resume/", "title": "Geeky CV :D", "created_utc": 1296864258.0, "num_comments": 24, "ups": 41}}], "after": null, "before": null}}
@@ -0,0 +1,42 @@
+ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+
+ <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en" xmlns:fb="http://www.facebook.com/2008/fbml">
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
+
+
+ <link rel="stylesheet" href="http://cdn.stumble-upon.com/css/badges_su.css?v=20110331a" type="text/css" media="screen, projection" />
+
+ <script type="text/javascript" src="http://cdn.stumble-upon.com/js/badge_su.js?v=20110331a"></script>
+
+
+ <title></title>
+
+ </head>
+ <body class="badge5">
+
+ <div id="wrapper">
+
+
+
+
+
+
+
+
+ <ul class="suHostedBadge">
+ <li><a class="logo" onClick="javascript: SUJS.buttonWin.open('http://www.stumbleupon.com/badge/?url=http://vitobotta.com/cv-resume/'); void(0);" href="javascript:void(0);" target="_top">StumbleUpon</a></li>
+ <li>
+ <a class="count" onClick="javascript: SUJS.buttonWin.open('http://www.stumbleupon.com/badge/?url=http://vitobotta.com/cv-resume/'); void(0);" href="javascript:void(0);" target="_top">
+ <span>6</span>
+ </a>
+ </li>
+ </ul>
+ </div> <!-- end wrapper -->
+
+
+ <span id="__su_server_time__" class="1301846270"></span>
+
+ </body>
+ </html>
1
+ %w(rubygems active_support webmock/test_unit stringio system_timer).each{|g| require g}
2
+
3
+ include WebMock::API
4
+
5
+ require File.join(File.dirname(__FILE__), "../lib/share_counts")
6
+
7
+ SOME_URL = "http://vitobotta.com/cv-resume/"
8
+ SOME_PARAMS = [ :url => "http://vitobotta.com/cv-resume/", :callback => "myCallback" ]
9
+
10
+ class ActiveSupport::TestCase
11
+ setup do
12
+ $stderr = @stderr = StringIO.new
13
+ $stdin = @stdin = StringIO.new
14
+ $stdout = @stdout = StringIO.new
15
+ end
16
+
17
+ def teardown
18
+ $stderr = @stderr = STDERR
19
+ $stdin = @stdin = STDIN
20
+ $stdout = @stdout = STDOUT
21
+ end
22
+ end
23
+
24
+ class Reddit
25
+ def self.params
26
+ [:url => SOME_URL]
27
+ end
28
+ def self.api
29
+ "http://www.reddit.com/api/info.json"
30
+ end
31
+ def self.json
32
+ @json ||= File.read(File.join(File.dirname(__FILE__), "reddit.json"))
33
+ end
34
+ def self.url_info_json
35
+ @url_info_json ||= File.read(File.join(File.dirname(__FILE__), "reddit-url-info.json"))
36
+ end
37
+ def self.by_domain_json
38
+ @by_domain_json ||= File.read(File.join(File.dirname(__FILE__), "reddit-by-domain.json"))
39
+ end
40
+ def self.selector
41
+ "data/children/data/score"
42
+ end
43
+ end
44
+
45
+ class Digg
46
+ def self.api
47
+ "http://services.digg.com/2.0/story.getInfo"
48
+ end
49
+ def self.params
50
+ [:links => SOME_URL]
51
+ end
52
+ def self.json
53
+ @json ||= File.read(File.join(File.dirname(__FILE__), "digg.json"))
54
+ end
55
+ def self.selector
56
+ "stories/diggs"
57
+ end
58
+ end
59
+
60
+ class Twitter
61
+ def self.api
62
+ "http://urls.api.twitter.com/1/urls/count.json"
63
+ end
64
+ def self.params
65
+ [:url => SOME_URL]
66
+ end
67
+ def self.json
68
+ @json ||= File.read(File.join(File.dirname(__FILE__), "twitter.json"))
69
+ end
70
+ def self.selector
71
+ "count"
72
+ end
73
+ end
74
+
75
+ class Facebook
76
+ def self.api
77
+ "http://api.facebook.com/restserver.php"
78
+ end
79
+ def self.json
80
+ @json ||= File.read(File.join(File.dirname(__FILE__), "facebook.json"))
81
+ end
82
+ def self.params
83
+ [:v => "1.0", :method => "links.getStats", :urls => SOME_URL, :callback => "fb_sharepro_render", :format => "json"]
84
+ end
85
+ def self.selector
86
+ "total_count"
87
+ end
88
+ end
89
+
90
+ class Linkedin
91
+ def self.api
92
+ "http://www.linkedin.com/cws/share-count"
93
+ end
94
+ def self.params
95
+ [:url => SOME_URL, :callback => "IN.Tags.Share.handleCount"]
96
+ end
97
+ def self.json
98
+ @json ||= File.read(File.join(File.dirname(__FILE__), "linkedin.json"))
99
+ end
100
+ def self.selector
101
+ "count"
102
+ end
103
+ end
104
+
105
+ class GoogleBuzz
106
+ def self.api
107
+ "http://www.google.com/buzz/api/buzzThis/buzzCounter"
108
+ end
109
+ def self.params
110
+ [:url => SOME_URL, :callback => "google_buzz_set_count"]
111
+ end
112
+ def self.json
113
+ @json ||= File.read(File.join(File.dirname(__FILE__), "googlebuzz.json"))
114
+ end
115
+ end
116
+
117
+ class StumbleUpon
118
+ def self.api
119
+ "http://www.stumbleupon.com/badge/embed/5/"
120
+ end
121
+ def self.params
122
+ [:url => SOME_URL]
123
+ end
124
+ def self.html
125
+ @json ||= File.read(File.join(File.dirname(__FILE__), "stumbleupon.html"))
126
+ end
127
+ end
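Each stub class keeps its query parameters as a one-element array wrapping a hash, so the same value can be splatted into from_json (e.g. *Reddit.params) and collapsed with the Array#to_hash extension when handed to WebMock's :query option. The typical stub therefore looks like:

  stub_request(:get, Reddit.api).
    with(:query => Reddit.params.to_hash).
    to_return(:body => Reddit.json)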
@@ -0,0 +1,83 @@
+ require File.expand_path(File.join(File.dirname(__FILE__), "test_helper"))
+
+ class RedditModuleTest < ActiveSupport::TestCase
+ test ".info_for should return a hash with score and permalink for the given url" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_return(:body => Reddit.url_info_json)
+ assert_equal({ "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/", "score" => 30}, ShareCounts::Reddit.info_for(SOME_URL))
+ end
+
+ test ".info_for with raise_exceptions=true should raise exception" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts::Reddit.info_for(SOME_URL, true) }
+ end
+
+ test ".by_domain should return permalink and score for each URL Reddit knows for the given domain" do
+ stub_request(:get, "http://www.reddit.com/domain/vitobotta.com.json").to_return(:body => Reddit.by_domain_json)
+
+ result = {
+ "http://vitobotta.com/protect-wordpress-blogs-administration-prying-eyes/" => {
+ "permalink" => "/r/Wordpress/comments/g51dn/protect_your_wordpress_blogs_administration_from/",
+ "score" => 8
+ },
+
+ "http://vitobotta.com/why-isnt-ssl-on-by-default-for-all-websites/" => {
+ "permalink" => "/r/geek/comments/fuepg/why_isnt_ssl_turned_on_by_default_for_all_websites/",
+ "score" => 4
+ },
+
+ "http://vitobotta.com/faster-internet-browsing-alternative-dns-servers-fast-local-cache-bind/" => {
+ "permalink" => "/r/apple/comments/fmkxz/faster_internet_browsing_with_alternative_dns/",
+ "score" => 22
+ },
+
+ "http://vitobotta.com/web-typography-techniques-usability-performance-seo-security/" => {
+ "permalink" => "/r/web_design/comments/fhw72/a_uptodate_look_at_the_state_of_web_typography/",
+ "score" => 3
+ },
+
+ "http://vitobotta.com/cv-resume/" => {
+ "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/",
+ "score" => 29
+ },
+
+ "http://vitobotta.com/share-counts-gem-social-networks/" => {
+ "permalink" => "/r/ruby/comments/fceyq/share_counts_ruby_gem_the_easiest_way_to_check/",
+ "score" => 4
+ },
+
+ "http://vitobotta.com/serialisable-validatable-tableless-model/" => {
+ "permalink" => "/r/ruby/comments/fb585/a_serialisable_and_validatable_tableless_model_to/",
+ "score" => 2
+ },
+
+ "http://vitobotta.com/smarter-faster-backups-restores-mysql-databases-with-mysqldump/" => {
+ "permalink" => "/r/Database/comments/faaji/smarter_faster_backups_and_restores_of_mysql/",
+ "score" => 5
+ },
+
+ "http://vitobotta.com/awesomeprint-similar-production/" => {
+ "permalink" => "/r/ruby/comments/fa2k4/why_you_should_think_twice_before_using_awesome/",
+ "score" => 7
+ },
+
+ "http://vitobotta.com/migrating-from-wordpress-to-jekyll-part-one-why-I-gave-up-on-wordpress/" => {
+ "permalink" => "/r/webdev/comments/g8yy9/migrating_from_wordpress_to_jekyll_part_1_why_i/",
+ "score" => 0
+ },
+
+ "http://vitobotta.com/painless-hot-backups-mysql-live-databases-percona-xtrabackup/" => {
+ "permalink" => "/r/Database/comments/fc3q5/painless_ultra_fast_hot_backups_and_restores_of/",
+ "score" => 0
+ }
+
+ }
+
+ assert_equal(result, ShareCounts::Reddit.by_domain("vitobotta.com"))
+ end
+
+
+ test ".by_domain with raise_exceptions=true should raise exception" do
+ stub_request(:get, "http://www.reddit.com/domain/vitobotta.com.json").to_raise(Exception)
+ assert_raise(Exception) { ShareCounts::Reddit.by_domain("vitobotta.com", true) }
+ end
+ end
@@ -0,0 +1,183 @@
+ require File.expand_path(File.join(File.dirname(__FILE__), "test_helper"))
+
+ class ShareCountsTest < ActiveSupport::TestCase
+
+ def stub_all
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_return(:body => Reddit.json)
+ stub_request(:get, Digg.api).with(:query => Digg.params.to_hash).to_return(:body => Digg.json)
+ stub_request(:get, Facebook.api).with(:query => Facebook.params.to_hash).to_return(:body => Facebook.json)
+ stub_request(:get, Twitter.api).with(:query => Twitter.params.to_hash).to_return(:body => Twitter.json)
+ stub_request(:get, Linkedin.api).with(:query => Linkedin.params.to_hash).to_return(:body => Linkedin.json)
+ stub_request(:get, GoogleBuzz.api).with(:query => GoogleBuzz.params.to_hash).to_return(:body => GoogleBuzz.json)
+ stub_request(:get, StumbleUpon.api).with(:query => StumbleUpon.params.to_hash).to_return(:body => StumbleUpon.html)
+ end
+
+ test ".supported_networks returns the supported networks" do
+ assert_equal(%w(reddit digg twitter facebook linkedin googlebuzz stumbleupon).sort, ShareCounts.supported_networks.sort)
+ end
+
+ test ".make_request makes a request to a remove service and returns the response" do
+ stub_request(:get, SOME_URL).with(:query => SOME_PARAMS.to_hash).to_return(:body => "---RESPONSE---")
+
+ assert_equal("---RESPONSE---", ShareCounts.send(:make_request, SOME_URL, *SOME_PARAMS ))
+ assert_equal(0, @stdout.string.split("\n").size)
+ end
+
+ test ".make_request should raise an exception if the remote service returns a 500 status code for three attempts" do
+ stub_request(:get, SOME_URL).to_return(:status => [500, "Internal Server Error"])
+
+ assert_raise(Exception) { ShareCounts.send(:make_request, SOME_URL) }
+
+ errors = []
+ 3.times {|n| errors << "Failed #{n+1} attempt(s) - 500 Internal Server Error" }
+ assert_equal(errors.sort, @stdout.string.split("\n").sort)
+ end
+
+
+ test ".make_request should raise an exception if the remote service times out for three attempts" do
+ stub_request(:get, SOME_URL).to_timeout
+
+ assert_raise(Exception) { ShareCounts.send(:make_request, SOME_URL) }
+
+ errors = []
+ 3.times {|n| errors << "Failed #{n+1} attempt(s) - Request Timeout" }
+ assert_equal(errors.sort, @stdout.string.split("\n").sort)
+ end
+
+ test ".make_request should return response if remote service fails < 3 attempts" do
+ stub_request(:get, SOME_URL).
+ to_return(:status => [500, "Internal Server Error"]).then.
+ to_timeout.then.
+ to_return(:body => "---RESPONSE---" )
+
+ assert_nothing_raised(Exception) { assert_equal("---RESPONSE---", ShareCounts.send(:make_request, SOME_URL)) }
+
+ assert_equal(["Failed 1 attempt(s) - 500 Internal Server Error", "Failed 2 attempt(s) - Request Timeout"].sort, @stdout.string.split("\n").sort)
+ end
+
+ test ".make_request should strip the callback call from the JSON response if a callback has been specified" do
+ stub_request(:get, SOME_URL).with(:query => SOME_PARAMS.to_hash).
+ to_return(:body => "myCallback(JSON_DATA);").then.
+ to_return(:body => "myCallback(JSON_DATA)")
+
+ assert_equal("JSON_DATA", ShareCounts.send(:make_request, SOME_URL, *SOME_PARAMS ))
+ assert_equal("JSON_DATA", ShareCounts.send(:make_request, SOME_URL, *SOME_PARAMS ))
+ assert_equal(0, @stdout.string.split("\n").size)
+ end
+
+
+ test ".from_json parses the JSON response returned by a remote service" do
+ stub_request(:get, SOME_URL).to_return(:body => "{\"a\":1,\"b\":2}").then.to_return(:body => "[\"a\", \"b\", 1, 2]")
+ stub_request(:get, SOME_URL).with(:query => SOME_PARAMS.to_hash).to_return(:body => "myCallback({\"a\":1,\"b\":2})")
+
+ assert_equal({ "a" => 1, "b" => 2 }, ShareCounts.send(:from_json, SOME_URL))
+ assert_equal(["a", "b", 1, 2], ShareCounts.send(:from_json, SOME_URL))
+ assert_equal({ "a" => 1, "b" => 2 }, ShareCounts.send(:from_json, SOME_URL, *SOME_PARAMS ))
+ assert_equal(0, @stdout.string.split("\n").size)
+ end
+
+ test ".extract_info correctly extract the information from the parsed JSON data received, in XPATH style" do
+ teardown
+
+ stub_all
+
+ assert_equal(31, ShareCounts.send(:extract_info, ShareCounts.send(:from_json, Reddit.api, *Reddit.params), { :selector => Reddit.selector } ))
+ assert_equal(1, ShareCounts.send(:extract_info, ShareCounts.send(:from_json, Digg.api, *Digg.params), { :selector => Digg.selector } ))
+ assert_equal(35, ShareCounts.send(:extract_info, ShareCounts.send(:from_json, Twitter.api, *Twitter.params), { :selector => Twitter.selector } ))
+ assert_equal(23, ShareCounts.send(:extract_info, ShareCounts.send(:from_json, Facebook.api, *Facebook.params), { :selector => Facebook.selector } ))
+ assert_equal(23, ShareCounts.send(:extract_info, ShareCounts.send(:from_json, Linkedin.api, *Linkedin.params), { :selector => Linkedin.selector } ))
+ end
+
+ test ".reddit should return the reddit score" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_return(:body => Reddit.json)
+ assert_equal(31, ShareCounts.reddit(SOME_URL))
+ end
+
+ test ".reddit with raise_exceptions=true should raise exception" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.reddit(SOME_URL, true) }
+ end
+
+ test ".digg should return the digg score" do
+ stub_request(:get, Digg.api).with(:query => Digg.params.to_hash).to_return(:body => Digg.json)
+ assert_equal(1, ShareCounts.digg(SOME_URL))
+ end
+
+ test ".digg with raise_exceptions=true should raise exception" do
+ stub_request(:get, Digg.api).with(:query => Digg.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.digg(SOME_URL, true) }
+ end
+
+ test ".twitter should return the twitter score" do
+ stub_request(:get, Twitter.api).with(:query => Twitter.params.to_hash).to_return(:body => Twitter.json)
+ assert_equal(35, ShareCounts.twitter(SOME_URL))
+ end
+
+ test ".twitter with raise_exceptions=true should raise exception" do
+ stub_request(:get, Twitter.api).with(:query => Twitter.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.twitter(SOME_URL, true) }
+ end
+
+ test ".facebook should return the facebook score" do
+ stub_request(:get, Facebook.api).with(:query => Facebook.params.to_hash).to_return(:body => Facebook.json)
+ assert_equal(23, ShareCounts.facebook(SOME_URL))
+ end
+
+ test ".facebook with raise_exceptions=true should raise exception" do
+ stub_request(:get, Facebook.api).with(:query => Facebook.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.facebook(SOME_URL, true) }
+ end
+
+ test ".linkedin should return the linkedin score" do
+ stub_request(:get, Linkedin.api).with(:query => Linkedin.params.to_hash).to_return(:body => Linkedin.json)
+ assert_equal(23, ShareCounts.linkedin(SOME_URL))
+ end
+
+ test ".linkedin with raise_exceptions=true should raise exception" do
+ stub_request(:get, Linkedin.api).with(:query => Linkedin.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.linkedin(SOME_URL, true) }
+ end
+
+ test ".googlebuzz should return the googlebuzz score" do
+ stub_request(:get, GoogleBuzz.api).with(:query => GoogleBuzz.params.to_hash).to_return(:body => GoogleBuzz.json)
+ assert_equal(1, ShareCounts.googlebuzz(SOME_URL))
+ end
+
+ test ".googlebuzz with raise_exceptions=true should raise exception" do
+ stub_request(:get, GoogleBuzz.api).with(:query => GoogleBuzz.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.googlebuzz(SOME_URL, true) }
+ end
+
+ test ".stumbleupon should return the stumbleupon score" do
+ stub_request(:get, StumbleUpon.api).with(:query => StumbleUpon.params.to_hash).to_return(:body => StumbleUpon.html)
+ assert_equal(6, ShareCounts.stumbleupon(SOME_URL))
+ end
+
+ test ".stumbleupon with raise_exceptions=true should raise exception" do
+ stub_request(:get, StumbleUpon.api).with(:query => StumbleUpon.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.stumbleupon(SOME_URL, true) }
+ end
+
+ test ".reddit_with_permalink should return a hash with Reddit score and permalink" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_return(:body => Reddit.json)
+ assert_equal({ "permalink" => "/r/ruby/comments/ffik5/geeky_cv_d/", "score" => 31 }, ShareCounts.reddit_with_permalink(SOME_URL))
+ end
+
+ test ".reddit_with_permalink with raise_exceptions=true should raise exception" do
+ stub_request(:get, Reddit.api).with(:query => Reddit.params.to_hash).to_raise(Exception)
+ assert_raise(Exception) { ShareCounts.reddit_with_permalink(SOME_URL, true) }
+ end
+
+ test ".all should returns scores for all the known networks" do
+ stub_all
+ assert_equal({ :digg => 1, :reddit => 31, :twitter => 35, :facebook => 23, :linkedin => 23, :googlebuzz => 1, :linkedin => 23, :stumbleupon => 6 }, ShareCounts.all(SOME_URL))
+ end
+
+ test ".selected should only return scores for the networks specified" do
+ stub_request(:get, Twitter.api).with(:query => Twitter.params.to_hash).to_return(:body => Twitter.json)
+ stub_request(:get, Linkedin.api).with(:query => Linkedin.params.to_hash).to_return(:body => Linkedin.json)
+
+ assert_equal({ :twitter => 35, :linkedin => 23 }, ShareCounts.selected(SOME_URL, ["twitter", "linkedin"]))
+ end
+
+ end
@@ -0,0 +1 @@
+ {"count":35,"url":"http://vitobotta.com/cv-resume/"}
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: share_counts
  version: !ruby/object:Gem::Version
- hash: 13
+ hash: 27
  prerelease:
  segments:
  - 0
+ - 1
  - 0
- - 9
- version: 0.0.9
+ version: 0.1.0
  platform: ruby
  authors:
  - Vito Botta
@@ -15,7 +15,7 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2011-03-26 23:00:00 +00:00
+ date: 2011-04-03 00:00:00 +01:00
  default_executable:
  dependencies:
  - !ruby/object:Gem::Dependency
@@ -75,7 +75,7 @@ dependencies:
  type: :runtime
  version_requirements: *id004
  - !ruby/object:Gem::Dependency
- name: minitest
+ name: SystemTimer
  prerelease: false
  requirement: &id005 !ruby/object:Gem::Requirement
  none: false
@@ -86,10 +86,10 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :development
+ type: :runtime
  version_requirements: *id005
  - !ruby/object:Gem::Dependency
- name: ansi
+ name: webmock
  prerelease: false
  requirement: &id006 !ruby/object:Gem::Requirement
  none: false
@@ -102,6 +102,62 @@ dependencies:
  version: "0"
  type: :development
  version_requirements: *id006
+ - !ruby/object:Gem::Dependency
+ name: activesupport
+ prerelease: false
+ requirement: &id007 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ hash: 3
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id007
+ - !ruby/object:Gem::Dependency
+ name: autotest-growl
+ prerelease: false
+ requirement: &id008 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ hash: 3
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id008
+ - !ruby/object:Gem::Dependency
+ name: autotest-fsevent
+ prerelease: false
+ requirement: &id009 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ hash: 3
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id009
+ - !ruby/object:Gem::Dependency
+ name: redgreen
+ prerelease: false
+ requirement: &id010 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ hash: 3
+ segments:
+ - 0
+ version: "0"
+ type: :development
+ version_requirements: *id010
  description: The easiest way to check how many times a URL has been shared on Reddit, Digg, Twitter, Facebook, LinkedIn, GoogleBuzz and StumbleUpon!
  email:
  - vito@botta.name
@@ -112,18 +168,32 @@ extensions: []
  extra_rdoc_files: []

  files:
+ - .autotest
  - .gitignore
  - Gemfile
  - Gemfile.lock
  - README.rdoc
  - Rakefile
+ - autotest/discover.rb
+ - autotest/rules.rb
  - lib/share_counts.rb
+ - lib/share_counts/array.rb
  - lib/share_counts/caching.rb
  - lib/share_counts/common.rb
  - lib/share_counts/reddit.rb
  - share_counts.gemspec
- - spec/share_count_spec.rb
- - spec/test_helper.rb
+ - test/digg.json
+ - test/facebook.json
+ - test/googlebuzz.json
+ - test/linkedin.json
+ - test/reddit-by-domain.json
+ - test/reddit-url-info.json
+ - test/reddit.json
+ - test/stumbleupon.html
+ - test/test_helper.rb
+ - test/test_reddit_module.rb
+ - test/test_share_counts.rb
+ - test/twitter.json
  has_rdoc: true
  homepage: https://github.com/vitobotta/share_counts
  licenses: []
@@ -154,10 +224,20 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  requirements: []

  rubyforge_project: share_counts
- rubygems_version: 1.5.2
+ rubygems_version: 1.6.2
  signing_key:
  specification_version: 3
  summary: The easiest way to check how many times a URL has been shared on Reddit, Digg, Twitter, Facebook, LinkedIn, GoogleBuzz and StumbleUpon!
  test_files:
- - spec/share_count_spec.rb
- - spec/test_helper.rb
+ - test/digg.json
+ - test/facebook.json
+ - test/googlebuzz.json
+ - test/linkedin.json
+ - test/reddit-by-domain.json
+ - test/reddit-url-info.json
+ - test/reddit.json
+ - test/stumbleupon.html
+ - test/test_helper.rb
+ - test/test_reddit_module.rb
+ - test/test_share_counts.rb
+ - test/twitter.json
@@ -1,3 +0,0 @@
- require File.expand_path(File.join(File.dirname(__FILE__), "test_helper"))
-
- # nothing in here yet!
@@ -1,157 +0,0 @@
- require 'rubygems'
- gem "minitest"
- require 'minitest/autorun'
-
- require 'ansi'
-
- class MiniTest::Unit
- include ANSI::Code
-
- PADDING_SIZE = 4
-
- def run(args = [])
- @verbose = true
-
- filter = if args.first =~ /^(-n|--name)$/ then
- args.shift
- arg = args.shift
- arg =~ /\/(.*)\// ? Regexp.new($1) : arg
- else
- /./ # anything - ^test_ already filtered by #tests
- end
-
- @@out.puts "Loaded suite #{$0.sub(/\.rb$/, '')}\nStarted"
-
- start = Time.now
- run_test_suites filter
-
- @@out.puts
- @@out.puts "Finished in #{'%.6f' % (Time.now - start)} seconds."
-
- @@out.puts
-
- @@out.print "%d tests, " % test_count
- @@out.print "%d assertions, " % assertion_count
- @@out.print red { "%d failures, " % failures }
- @@out.print yellow { "%d errors, " % errors }
- @@out.puts cyan { "%d skips" % skips}
-
- return failures + errors if @test_count > 0 # or return nil...
- end
-
- # Overwrite #run_test_suites so that it prints out reports
- # as errors are generated.
- def run_test_suites(filter = /./)
- @test_count, @assertion_count = 0, 0
- old_sync, @@out.sync = @@out.sync, true if @@out.respond_to? :sync=
-
- TestCase.test_suites.each do |suite|
- test_cases = suite.test_methods.grep(filter)
- if test_cases.size > 0
- @@out.print "\n#{suite}:\n"
- end
-
- test_cases.each do |test|
- inst = suite.new test
- inst._assertions = 0
-
- t = Time.now
-
- @broken = nil
-
- @@out.print(case inst.run(self)
- when :pass
- @broken = false
- green { pad_with_size "PASS" }
- when :error
- @broken = true
- yellow { pad_with_size "ERROR" }
- when :fail
- @broken = true
- red { pad_with_size "FAIL" }
- when :skip
- @broken = false
- cyan { pad_with_size "SKIP" }
- end)
-
-
- # @@out.print " #{test.humanize.gsub(/Test\s\d+\s(.*)/,"\\1")} "
- @@out.print " #{test} "
- @@out.print " (%.2fs) " % (Time.now - t)
-
- if @broken
- @@out.puts
-
- report = @report.last
- @@out.puts pad(report[:message], 10)
- trace = MiniTest::filter_backtrace(report[:exception].backtrace).first
- @@out.print pad(trace, 10)
-
- @@out.puts
- end
-
- @@out.puts
- @test_count += 1
- @assertion_count += inst._assertions
- end
- end
- @@out.sync = old_sync if @@out.respond_to? :sync=
- [@test_count, @assertion_count]
- end
-
- def pad(str, size=PADDING_SIZE)
- " " * size + str
- end
-
- def pad_with_size(str)
- pad("%5s" % str)
- end
-
- # Overwrite #puke method so that is stores a hash
- # with :message and :exception keys.
- def puke(klass, meth, e)
- result = nil
- msg = case e
- when MiniTest::Skip
- @skips += 1
- result = :skip
- e.message
- when MiniTest::Assertion
- @failures += 1
- result = :fail
- e.message
- else
- @errors += 1
- result = :error
- "#{e.class}: #{e.message}\n"
- end
-
- @report << {:message => msg, :exception => e}
- result
- end
-
-
- class TestCase
- # Overwrite #run method so that is uses symbols
- # as return values rather than characters.
- def run(runner)
- result = :pass
- begin
- @passed = nil
- self.setup
- self.send self.__name__
- @passed = true
- rescue Exception => e
- @passed = false
- result = runner.puke(self.class, self.__name__, e)
- ensure
- begin
- self.teardown
- rescue Exception => e
- result = runner.puke(self.class, self.__name__, e)
- end
- end
- result
- end
- end
- end