DRMacIver-gourmand 0.0.3 → 0.0.9

data/.gitignore ADDED
@@ -0,0 +1 @@
+ pkg
data/Rakefile CHANGED
@@ -1,5 +1,6 @@
  require 'rubygems'
  require 'rake'
+ require 'spec/rake/spectask'
 
  require 'jeweler'
  Jeweler::Tasks.new do |gem|
@@ -13,5 +14,10 @@ Jeweler::Tasks.new do |gem|
    gem.add_dependency("json_pure")
    gem.add_dependency("mechanize")
    gem.add_dependency("httparty")
-   # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
+ end
+
+ Spec::Rake::SpecTask.new do |t|
+   t.rcov = false
+   t.spec_files = FileList["spec/**/*_spec.rb"]
+   t.libs << "./lib"
  end
data/VERSION CHANGED
@@ -1 +1 @@
- 0.0.3
+ 0.0.9
data/bin/gourmand CHANGED
@@ -9,7 +9,7 @@ def usage
  gourmand update # Update a gourmand instance, posting the new bookmarks to delicious"
  gourmand undo [site] # deletes all imported posts (from specific site, or all if not specified)
  USAGE
-   Reddilicious.site_names.each do |site|
+   Gourmand.site_names.each do |site|
      STDERR.puts " gourmand #{site} # set your #{site} user"
    end
 
@@ -17,11 +17,10 @@ def usage
  end
 
  dir=ENV["GOURMAND_HOME"] || File.join(ENV["HOME"], ".gourmand")
- gourmand = Reddilicious.new(dir)
+ gourmand = Gourmand.new(dir)
 
- if !File.directory?(dir)
+ if !gourmand.exists?
    puts "no such directory #{dir}. Creating..."
-   Dir.mkdir(dir)
 
    puts "Delicious user name:"
    delicious = STDIN.gets.strip
@@ -29,6 +28,16 @@ if !File.directory?(dir)
    puts "Delicious password:"
    delicious_password = STDIN.gets.strip
    gourmand.create!(delicious, delicious_password)
+ elsif gourmand.needs_update?
+   puts "Your gourmand instance needs updating to use with this version. Do you want to update now? y/n"
+   if STDIN.gets =~ /y/i
+     gourmand.update!
+   else
+     exit(1)
+   end
+ elsif gourmand.from_the_future?
+   puts "This gourmand instance is more recent than the version of gourmand you are running. Please update in order to use it"
+   exit(1)
  end
 
  case ARGV[0]
@@ -39,10 +48,10 @@ case ARGV[0]
      $stderr = log
      gourmand.transfer_to_delicious
    end
- when *Reddilicious.site_names:
+ when *Gourmand.site_names:
    gourmand.site_for(ARGV[0]).ask_for_credentials
  when "undo"
-   sites = ARGV[1..-1].empty? ? Reddilicious.site_names : ARGV[1..-1]
+   sites = ARGV[1..-1].empty? ? Gourmand.site_names : ARGV[1..-1]
    puts "undo import for sites #{sites.inspect}: are you sure? (y/n)"
    if STDIN.gets.strip.downcase == 'y'
      sites.each { |s| gourmand.site_for(s).undo_import! }
data/gourmand.gemspec CHANGED
@@ -2,11 +2,11 @@
 
  Gem::Specification.new do |s|
    s.name = %q{gourmand}
-   s.version = "0.0.3"
+   s.version = "0.0.9"
 
    s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
    s.authors = ["David R. MacIver"]
-   s.date = %q{2009-07-17}
+   s.date = %q{2009-08-15}
    s.default_executable = %q{gourmand}
    s.email = %q{david.maciver@gmail.com}
    s.executables = ["gourmand"]
@@ -15,26 +15,35 @@ Gem::Specification.new do |s|
    "README.markdown"
  ]
  s.files = [
-   "LICENSE",
+   ".gitignore",
+   "LICENSE",
    "README.markdown",
    "Rakefile",
    "VERSION",
    "bin/gourmand",
    "gourmand.gemspec",
-   "lib/blacklist.rb",
    "lib/delicious.rb",
    "lib/gourmand.rb",
    "lib/post.rb",
    "lib/reddit.rb",
    "lib/site.rb",
    "lib/stumbleupon.rb",
-   "lib/twitter.rb"
+   "lib/twitter.rb",
+   "lib/version.rb",
+   "spec/post_spec.rb",
+   "spec/spec_helper.rb",
+   "spec/twitter_spec.rb"
  ]
  s.homepage = %q{http://github.com/DRMacIver/gourmand}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.4}
  s.summary = %q{gourmand is a tool for automatically importing links into delicious}
+ s.test_files = [
+   "spec/spec_helper.rb",
+   "spec/post_spec.rb",
+   "spec/twitter_spec.rb"
+ ]
 
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
data/lib/gourmand.rb CHANGED
@@ -4,9 +4,43 @@ require "delicious"
  require "json"
  require 'net/http'
  require 'uri'
- require "blacklist"
+ require 'version'
+ require "fileutils"
+
+ class Gourmand
+   def self.version
+     Version.new(IO.read(File.join(File.dirname(__FILE__), "..", "VERSION")))
+   end
+
+   def version_file
+     File.join(@dir, "VERSION")
+   end
+
+   def version
+     if File.exists? version_file
+       Version.new(IO.read(version_file))
+     else
+       # the version right before this feature was added
+       Version.new("0.0.5")
+     end
+   end
+
+   def needs_update?
+     version < Gourmand.version
+   end
+
+   def from_the_future?
+     version > Gourmand.version
+   end
+
+   def update!
+     Migrations.new(self).migrate!
+   end
+
+   def exists?
+     File.exists?(@dir)
+   end
 
- class Reddilicious
    # lambdas to get lazy loading
    SitesToClasses = {
      "reddit" => lambda{
@@ -25,9 +59,8 @@ class Reddilicious
    }
 
    attr_accessor :dir
-   def initialize(dir)
+   def initialize(dir=File.join(ENV["HOME"], ".gourmand"))
      @dir = dir
-     @blacklist = Blacklist.from_file(File.join(dir, "blacklist"))
      @untiny_cache = if File.exists?(untiny_cache_file)
        JSON.parse(IO.read(untiny_cache_file))
      else
@@ -49,6 +82,8 @@ class Reddilicious
    end
 
    def create!(delicious, delicious_password)
+     Dir.mkdir(dir)
+     File.open(version_file, "w"){|o| o.puts Gourmand.version}
      File.open(details_file, "w"){|o|
        o.puts({:delicious_user => delicious, :delicious_password => delicious_password}.to_json)
      }
@@ -86,11 +121,9 @@ class Reddilicious
      end
    end
 
-   def bookmark_for(url, suggest_tags=true)
+   def bookmark_for(url)
      url = untiny_url(url)
-     Post.new do |post|
-       post.url = url
-     end
+     Post.new(:url=>url)
    end
 
    def delicious_posts
@@ -151,23 +184,16 @@ class Reddilicious
    puts "#{new_updates.length} urls after merging"
 
    new_updates.each do |update|
+     puts "importing #{update.description} (#{update.url})"
 
-     blacklisted = @blacklist && @blacklist.blacklisted?(update.tags)
+     update.fetch_metadata!
 
-     if blacklisted
-       puts "ignoring #{update.description} due to (#{update.url}) is blacklisting (tags #{update.tags})"
-     else
-       puts "importing #{update.description} (#{update.url})"
-
-       update.fetch_metadata!(false)
-
-       res = Delicious.post("/posts/add", :query => update.to_h)
-       if !res['result'] || res['result']['code'] != 'done'
-         puts "error importing post: #{res.inspect}"
-       end
-
-       sleep(1)
+     res = Delicious.post("/posts/add", :query => update.to_h)
+     if !res['result'] || res['result']['code'] != 'done'
+       puts "error importing post: #{res.inspect}"
      end
+
+     sleep(1)
    end
    puts "Saving data to storage"
    sites.each{|x| x.save!}
@@ -193,3 +219,39 @@ class Reddilicious
    end
 
  end
+
+ class Migrations
+   Migrations = []
+
+
+   def initialize(gourmand)
+     @gourmand = gourmand
+   end
+
+   def self.migration(version, &migration)
+     Migrations << [Version.new(version), migration]
+   end
+
+   migration("0.0.6"){|gourmand|
+     # This version does nothing except introduce version information
+     # into the gourmand directory. It is covered by the normal migration
+     # behaviour of adding a version file at the end.
+   }
+
+   migration("0.0.8"){|gourmand|
+     old_tweets = File.join(gourmand.dir, "twitter", "posts.json")
+
+     if File.exists? old_tweets
+       FileUtils.mv(old_tweets, File.join(gourmand.dir, "twitter", "friends.json"))
+     end
+   }
+
+   def migrate!
+     Migrations.
+       sort{|x, y| x[0] <=> y[0]}.
+       reject{|x| (x[0] <= @gourmand.version) || (x[0] > Gourmand.version)}.each{|version, mig|
+         mig.call(@gourmand)
+       }
+     File.open(@gourmand.version_file, "w"){|o| o.puts Gourmand.version}
+   end
+ end
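
Note on the Migrations class added above: migrate! sorts the registered migrations by version, skips any that are not newer than the instance's on-disk VERSION or that are newer than the installed gem, runs the rest in order, and finally stamps the gem's current version into the instance's VERSION file. As a rough sketch of how a later release might register another migration (the 0.0.10 version number and the file it removes are hypothetical, not part of this gem):

    class Migrations
      # Hypothetical: would only run once the installed gem reaches 0.0.10,
      # and only for instances whose recorded version is still older than that.
      migration("0.0.10") { |gourmand|
        stale = File.join(gourmand.dir, "stumbleupon", "cache.json")
        FileUtils.rm(stale) if File.exists?(stale)
      }
    end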
data/lib/post.rb CHANGED
@@ -15,7 +15,7 @@ class Post
    yield self if block_given?
    if hash
      hash.each do |key, value|
-       instance_variable_set("@" + key, value)
+       instance_variable_set("@" + key.to_s, value)
      end
    end
    raise "all posts must have a URL" if !self.url
@@ -35,7 +35,7 @@ class Post
    if !self.tags then Set.new else Set[*self.tags.split] end
  end
 
- def fetch_metadata!(suggest_tags=true)
+ def fetch_metadata!()
    self.description ||= begin
      Nokogiri::HTML(open(url)).xpath("//title").text.gsub("\n", " ").gsub(/ +/, " ").strip
    rescue Exception => e
@@ -44,19 +44,23 @@ class Post
    end
 
    self.description = url if description.empty?
-
-   if suggest_tags
-     suggest = Delicious.get("/posts/suggest", :query => {:url => url} )
-     if suggest['suggest']
-       suggested_tags = suggest["suggest"]["popular"] || []
-       self.tags = suggested_tags.is_a? Array ? suggested_tags.join(" ") : suggested_tags
-     end
-     sleep 1
-   end
+ end
+
+ def ==(that)
+   (that.is_a?(Post) &&
+     (self.url == that.url) &&
+     (self.description == that.description) &&
+     (self.extended == that.extended) &&
+     (self.dt == that.dt) &&
+     (self.tag_set == that.tag_set)) || false
+ end
+
+ def dup
+   Post.new(self.to_h)
  end
 
  def merge(that)
-   return self if !that
+   return self if that.nil?
    raise "cannot merge posts with different URLS: #{self.url} != #{that.url}" if self.url != that.url
 
    result = Post.new{|p|
@@ -84,7 +88,7 @@ class Post
      p.dt = [self.dt, that.dt].compact.min
    }
 
-   result = nil if result == that # FIXME
+   result = nil if result == that
    result
  end
  end
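
The Post changes above trade the delicious tag-suggestion call for value semantics: == compares url, description, extended, dt and the tag set, dup round-trips through to_h, and merge returns nil whenever the merged result is identical to the post it was merged with, so callers know nothing needs re-posting. A short illustration against that class (the URL and timestamps are made up for the example):

    a = Post.new { |p| p.url = "http://example.com"; p.tags = "foo"; p.dt = "2009-08-01T00:00:00Z" }
    b = Post.new { |p| p.url = "http://example.com"; p.tags = "foo"; p.dt = "2009-08-10T00:00:00Z" }

    a == a.dup          # => true  (same url, description, extended, dt and tags)
    a.merge(a.dup)      # => nil   (nothing new relative to the argument)
    a.merge(b).dt       # => "2009-08-01T00:00:00Z" (the earliest timestamp wins)
    a.merge(nil) == a   # => true  (merging with nil returns self)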
data/lib/reddit.rb CHANGED
@@ -20,7 +20,10 @@ module Reddit
    new_results = nil
    after = nil
    i = 0
-   while !(new_results = merge_results(Reddit.get("/user/#{credentials["user"].strip}/liked/.json", :query => {"after" => after})["data"]["children"].map{|x| x["data"]})).empty?
+
+   url = "/user/#{(credentials["username"] || credentials["user"]).strip}/liked/.json"
+   puts "fetching data from #{url}"
+   while !(new_results = merge_results(Reddit.get(url, :query => {"after" => after})["data"]["children"].map{|x| x["data"]})).empty?
      puts "fetching reddit page #{i}"
      results += new_results
      after = new_results[-1]["name"]
data/lib/twitter.rb CHANGED
@@ -1,11 +1,18 @@
  require "rubygems"
  require "httparty"
  require "gourmand"
+ require "time"
 
  module Twitter
    include HTTParty
    base_uri "http://twitter.com"
    format :json
+
+   class Timeline < Site
+     def initialize(timeline)
+       @timeline = timeline
+     end
+   end
 
    class FriendsTimeline < Site
      def name
@@ -16,9 +23,7 @@ module Twitter
      puts "Updating twitter"
      balance
 
-     last_post_id = posts[0] && posts[0]["id"]
-
-     results = []
+     results = {}
 
      new_tweets = nil
 
@@ -26,46 +31,49 @@ module Twitter
 
      query = {:count => 200 }
 
-     query[:since_id] = last_post_id if last_post_id
-
      while !(new_tweets = get_tweets(query)).empty?
-       results += new_tweets
+       new_tweets.reject!{|t| @ids.include? identifier(t) }
+       break if new_tweets.empty?
+       new_tweets.each { |t| results[identifier(t)] = t }
        puts "importing twitter page #{query[:page] || 0}"
        query[:page] = (query[:page] || 0) + 1
      end
-
-     @posts += results
 
-     results.map do |res|
-       urls = res["text"].scan(/(http:\/\/[^,()" ]+)/).flatten
-       ats = res["text"].scan(/@([[:alnum:]]+)/).flatten
-       hashtags = res["text"].scan(/#([[:alnum:]]+)/).flatten
-       retweet = res["text"] =~ /RT[^a-zA-Z]/ || res["text"] =~ /\(via @[^)]+\)/
+     @posts += results.values
 
-       urls.map do |url|
-         post = @gourmand.bookmark_for(url)
-         post.tags = [
-           "via:twitter",
-           Post::NEW_MARKER,
-           ats.map{|a| "to:" + a}.sort,
-           "from:#{res["user"]["screen_name"]}",
-           hashtags,
-           ("retweet" if retweet),
-           post.tags
-         ].compact.flatten.join(" ").strip
-
-         post.extended = "@#{res["user"]["screen_name"]}: \"#{res["text"]}\" \n (from http://twitter.com/#{res["user"]["screen_name"]}/status/#{res["id"]})"
-         post.dt = date(res).strftime("%Y-%m-%dT%H:%M:%SZ")
-
-         post
-       end
+     results.values.map do |res|
+       to_posts(res)
      end.flatten
    end
+
+   def to_posts(res)
+     urls = res["text"].scan(/(http:\/\/[^,()" ]+)/).flatten.map { |u| u.gsub(/[\.:]+\Z/, '') }.uniq
+     ats = res["text"].scan(/@([[:alnum:]]+)/).flatten
+     hashtags = res["text"].scan(/#([[:alnum:]]+)/).flatten
+     retweet = res["text"] =~ /RT[^a-zA-Z]/ || res["text"] =~ /\(via @[^)]+\)/
 
+     urls.map do |url|
+       post = @gourmand.bookmark_for(url)
+       post.tags = [
+         "via:twitter",
+         Post::NEW_MARKER,
+         ats.map{|a| "to:" + a}.sort,
+         "from:#{res["user"]["screen_name"]}",
+         hashtags,
+         ("retweet" if retweet),
+         post.tags
+       ].compact.flatten.join(" ").strip
+
+       post.extended = "@#{res["user"]["screen_name"]}: \"#{res["text"]}\" \n (from http://twitter.com/#{res["user"]["screen_name"]}/status/#{res["id"]})"
+       post.dt = date(res).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+       post
+     end
+   end
 
    def get_tweets(query)
      begin
-       res = Twitter.get("/statuses/friends_timeline.json", :query => query, :basic_auth => {:username => credentials["username"], :password => credentials["password"]})
+       res = Twitter.get("/favorites.json", :query => query, :basic_auth => {:username => credentials["username"], :password => credentials["password"]})
        raise "Error fetching timeline: '#{res['error']}'" if res.is_a?(Hash) && res['error']
        res
      rescue Crack::ParseError
@@ -73,16 +81,15 @@ module Twitter
      end
    end
 
-
    def identifier(post)
      post["id"]
    end
 
    def date(post)
-     DateTime.parse(post["created_at"])
+     @date_cache ||= Hash.new { |h,k| h[k] = Time.parse(k) }
+     @date_cache[post['created_at']]
    end
 
-
    def ask_for_credentials
      puts "#{name} user name:"
      user = STDIN.gets.strip
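
A side note on the date change above: the per-tweet DateTime.parse is replaced by a hash with a default block, so each distinct created_at string is handed to Time.parse once and then served from the cache. The pattern in isolation (the timestamp is just an example value):

    require "time"

    # The block runs only on a cache miss; its result is stored under the key.
    date_cache = Hash.new { |h, k| h[k] = Time.parse(k) }

    date_cache['1984-09-01T14:21:31Z']  # parsed on first access
    date_cache['1984-09-01T14:21:31Z']  # served from the cache thereafter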
data/lib/version.rb ADDED
@@ -0,0 +1,14 @@
+ class Version
+   include Comparable
+   attr_accessor :to_s, :parts
+
+   def initialize(string)
+     string.strip!
+     @to_s = string
+     @parts = string.split(".").map{|x| x.to_i}
+   end
+
+   def <=>(that)
+     self.parts <=> that.parts
+   end
+ end
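
Version compares releases segment by segment as integers (Comparable plus Array#<=> over the split parts), which keeps needs_update? and from_the_future? correct where a plain string comparison would not be. For example, against the class above:

    Version.new("0.0.10") > Version.new("0.0.9")    # => true  ([0, 0, 10] <=> [0, 0, 9])
    "0.0.10" > "0.0.9"                              # => false (lexical string comparison)
    Version.new("0.0.9\n") == Version.new("0.0.9")  # => true  (input is stripped first)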
data/spec/post_spec.rb ADDED
@@ -0,0 +1,80 @@
+ require File.join(File.dirname(__FILE__), 'spec_helper')
+ require "post"
+
+ describe Post do
+   post = Post.new do |p|
+     p.url = "http://www.google.com"
+     p.description = "Google"
+     p.tags = "search"
+   end
+
+   post2 = Post.new do |p|
+     p.url = "http://www.google.com"
+     p.description = "Google"
+     p.tags = "search"
+   end
+
+
+   post3 = Post.new do |p|
+     p.url = "http://www.cuteoverload.com"
+     p.description = "Cute Overload"
+     p.tags = "cute kittens"
+   end
+
+   describe "equality" do
+     it "should treat things with the same properties as equal" do
+       post.should == post
+       post.should == post2
+     end
+
+     it "should treat things with different properties as unequal" do
+       post.should_not == post3
+     end
+
+     it "should not be equal to nil" do
+       post.should_not == nil
+     end
+   end
+
+
+   describe "merging" do
+     later = post.dup
+     earlier = post.dup
+
+     later.dt = Time.now.strftime("%Y-%m-%dT%H:%M:%SZ")
+     earlier.dt = (Time.now - 3600 * 24).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+
+     it "should return nil when merging equal posts" do
+       post.merge(post2).should be(nil)
+     end
+
+     it "should return a post with the minimum time stamp of the two" do
+       z = earlier.merge(later)
+
+       z.should_not == nil
+       z.dt.should == earlier.dt
+     end
+
+     it "should return self when merging with nil" do
+       post.merge(nil).should ==(post)
+     end
+
+   end
+
+   describe "dup" do
+     it "should produce an equal post" do
+       post.dup.should == post
+     end
+
+
+     it "should produce an independent post" do
+       p = post.dup
+       p.url = "http://www.google.co.uk"
+
+       post.url.should == "http://www.google.com"
+     end
+   end
+
+
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,7 @@
+ require 'rubygems'
+ require 'spec'
+
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+
+ Spec::Runner.configure do |config|
+ end
@@ -0,0 +1,98 @@
1
+ require File.join(File.dirname(__FILE__), 'spec_helper')
2
+ require "twitter"
3
+ require "tmpdir"
4
+
5
+ describe Twitter::FriendsTimeline do
6
+
7
+ before do
8
+ gourmand = mock('Gourmand')
9
+ gourmand.stub!(:dir).and_return(Dir::tmpdir)
10
+ gourmand.stub!(:bookmark_for).and_return { |url| Post.new :url=>url }
11
+ @twitter = Twitter::FriendsTimeline.new(gourmand)
12
+ @tweets = [{
13
+ 'id' => '12345',
14
+ 'text' => '@foo @bar kittens http://kittens.com http://morekittens.com http://kittens.com #cute #kittens',
15
+ 'created_at' => '1984-09-01T14:21:31Z',
16
+ 'user' => { 'screen_name' => 'baz', 'id' => 'baz_id' }
17
+ }, {
18
+ 'id' => '12346',
19
+ 'text'=> 'RT: check out this awesum link: http://awesome.com/awesome-story',
20
+ 'created_at' => '1984-09-01T14:21:31Z',
21
+ 'user' => { 'screen_name' => 'mongo', 'id' => 'mongo_id' }
22
+ }, {
23
+ 'id' => '12347',
24
+ 'text'=> 'this is a tweetie-style retweet: http://awesome.com/awesome-story (via @someone)',
25
+ 'created_at' => '1984-09-01T14:21:31Z',
26
+ 'user' => { 'screen_name' => 'mongo', 'id' => 'mongo_id' }
27
+ }, {
28
+ 'id' => '12348',
29
+ 'text'=> 'this is a tweet with punctuation after the link: http://bit.ly/das232...',
30
+ 'created_at' => '1984-09-01T14:21:31Z',
31
+ 'user' => { 'screen_name' => 'mongo', 'id' => 'mongo_id' }
32
+ }]
33
+ end
34
+
35
+
36
+
37
+ describe "to_posts" do
38
+
39
+ before do
40
+ @posts = @tweets.map { |t| @twitter.to_posts(t) }
41
+ end
42
+
43
+ it "should convert a tweet to one or more posts, one for each unique url" do
44
+ @posts[0].size.should == 2
45
+ @posts[0].map { |p| p.url }.sort.should == ["http://kittens.com", "http://morekittens.com"]
46
+ end
47
+
48
+ it "should correctly set the timestamp" do
49
+ @posts[0][0].dt.should == '1984-09-01T14:21:31Z'
50
+ end
51
+
52
+ it "should convert hashtags into delicious tags" do
53
+ @posts[0][0].tag_set.should include('kittens')
54
+ @posts[0][0].tag_set.should include('cute')
55
+ end
56
+
57
+ it "should add tags for all at's" do
58
+ ['to:foo', 'to:bar'].each { |t| @posts[0][0].tag_set.should include(t) }
59
+ end
60
+
61
+ it "should add tags for sender" do
62
+ @posts[0][0].tag_set.should include('from:baz')
63
+ end
64
+
65
+ it "should add a retweet tag if it's a retweet" do
66
+ @posts[0][0].tag_set.should_not include('retweet')
67
+ @posts[1][0].tag_set.should include('retweet')
68
+ @posts[2][0].tag_set.should include('retweet')
69
+ end
70
+
71
+ it "should deal with punctuation after urls" do
72
+ @posts[3][0].url.should == 'http://bit.ly/das232'
73
+ end
74
+
75
+ it "should be tagged as new post by default" do
76
+ @posts.flatten.all? {|p| p.tag_set.should include(Post::NEW_MARKER) }.should be_true
77
+ end
78
+
79
+ it "should be tagged as imported via twitter" do
80
+ @posts.flatten.all? {|p| p.tag_set.should include('via:twitter') }.should be_true
81
+ end
82
+
83
+ it "should have an extended info with context" do
84
+ @posts[0][0].extended.should include(@tweets[0]['text'])
85
+ @posts[0][0].extended.should include("(from http://twitter.com/baz/status/12345)")
86
+ end
87
+ end
88
+
89
+ describe "update!" do
90
+ it "should ignore duplicates" do
91
+ @twitter.stub!(:get_tweets).and_return { |query|
92
+ query[:page].to_i > 0 ? [] : (@tweets * 2)
93
+ }
94
+ posts = @twitter.update!
95
+ posts.size.should ==@tweets.size + 1
96
+ end
97
+ end
98
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: DRMacIver-gourmand
  version: !ruby/object:Gem::Version
-   version: 0.0.3
+   version: 0.0.9
  platform: ruby
  authors:
  - David R. MacIver
@@ -9,7 +9,7 @@ autorequire:
  bindir: bin
  cert_chain: []
 
- date: 2009-07-17 00:00:00 -07:00
+ date: 2009-08-15 00:00:00 -07:00
  default_executable: gourmand
  dependencies:
  - !ruby/object:Gem::Dependency
@@ -62,13 +62,13 @@ extra_rdoc_files:
  - LICENSE
  - README.markdown
  files:
+ - .gitignore
  - LICENSE
  - README.markdown
  - Rakefile
  - VERSION
  - bin/gourmand
  - gourmand.gemspec
- - lib/blacklist.rb
  - lib/delicious.rb
  - lib/gourmand.rb
  - lib/post.rb
@@ -76,8 +76,13 @@ files:
  - lib/site.rb
  - lib/stumbleupon.rb
  - lib/twitter.rb
+ - lib/version.rb
+ - spec/post_spec.rb
+ - spec/spec_helper.rb
+ - spec/twitter_spec.rb
  has_rdoc: false
  homepage: http://github.com/DRMacIver/gourmand
+ licenses:
  post_install_message:
  rdoc_options:
  - --charset=UTF-8
@@ -98,9 +103,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  requirements: []
 
  rubyforge_project:
- rubygems_version: 1.2.0
+ rubygems_version: 1.3.5
  signing_key:
  specification_version: 3
  summary: gourmand is a tool for automatically importing links into delicious
- test_files: []
-
+ test_files:
+ - spec/spec_helper.rb
+ - spec/post_spec.rb
+ - spec/twitter_spec.rb
data/lib/blacklist.rb DELETED
@@ -1,44 +0,0 @@
- class Blacklist
-   def initialize(blacklist)
-     @blacklist = Hash.new{|h, k| h[k] = [] }
-
-     blacklist.each { |list|
-       list.each { |tag|
-         @blacklist[tag] << list
-       }
-     }
-   end
-
-   def self.from_file(file)
-     return nil if !File.exists?(file)
-     Blacklist.new(IO.read(file).split("\n").map{|l| l.split})
-   end
-
-   def blacklisted?(tags)
-     if tags.is_a? String
-       tags = tags.split
-     end
-
-     if !tags.is_a? Array
-       raise "unrecognised argument #{tags.inspect}"
-     end
-
-     shallow_flatten(tags.
-       map{|t| @blacklist[t]}.
-       compact).
-       any?{|set|
-         !set.empty? && set.all?{|t|
-           tags.include?(t)
-         }
-       }
-   end
-
-   private
-
-   def shallow_flatten(enum)
-     it = []
-     enum.each{|x| it += x }
-     it
-   end
-
- end