feedjira 1.3.1 → 1.4.0
- checksums.yaml +4 -4
- data/.travis.yml +5 -0
- data/CHANGELOG.md +14 -0
- data/Gemfile +1 -0
- data/feedjira.gemspec +4 -4
- data/lib/feedjira/feed_utilities.rb +2 -1
- data/lib/feedjira/version.rb +1 -1
- data/spec/feedjira/feed_entry_utilities_spec.rb +11 -11
- data/spec/feedjira/feed_spec.rb +165 -164
- data/spec/feedjira/feed_utilities_spec.rb +35 -35
- data/spec/feedjira/parser/atom_entry_spec.rb +22 -26
- data/spec/feedjira/parser/atom_feed_burner_entry_spec.rb +9 -9
- data/spec/feedjira/parser/atom_feed_burner_spec.rb +12 -12
- data/spec/feedjira/parser/atom_spec.rb +20 -18
- data/spec/feedjira/parser/google_docs_atom_entry_spec.rb +3 -3
- data/spec/feedjira/parser/google_docs_atom_spec.rb +5 -5
- data/spec/feedjira/parser/itunes_rss_item_spec.rb +15 -15
- data/spec/feedjira/parser/itunes_rss_owner_spec.rb +2 -2
- data/spec/feedjira/parser/itunes_rss_spec.rb +15 -15
- data/spec/feedjira/parser/rss_entry_spec.rb +18 -23
- data/spec/feedjira/parser/rss_feed_burner_entry_spec.rb +18 -23
- data/spec/feedjira/parser/rss_feed_burner_spec.rb +13 -13
- data/spec/feedjira/parser/rss_spec.rb +11 -11
- data/spec/feedjira/preprocessor_spec.rb +2 -2
- data/spec/sample_feeds/itunes.xml +0 -1
- data/spec/spec_helper.rb +2 -0
- metadata +11 -11
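
Most of the spec churn in this release converts assertions from RSpec's deprecated "should" syntax (e.g. @entry['author'].should == "AWS Editor") to the "expect" syntax. The snippet below is an illustrative, standalone sketch of that pattern only; the describe block and example names are hypothetical and not taken from the gem:

    # Illustrative only: a minimal spec contrasting the old and new assertion styles.
    require "rspec"

    RSpec.describe "expect-syntax migration" do
      let(:title) { "Paul Dix Explains Nothing" }

      it "asserts with the expect syntax used throughout the 1.4.0 specs" do
        # Old style removed from these specs:
        #   title.should == "Paul Dix Explains Nothing"
        expect(title).to eq "Paul Dix Explains Nothing"
      end
    end

Running this file with rspec passes; the diffs below apply the same conversion across the feed-utility and parser specs.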
data/spec/feedjira/feed_utilities_spec.rb

@@ -12,7 +12,7 @@ describe Feedjira::FeedUtilities do
     context "when the flag is not set" do
       it "does not call the preprocessing method" do
         @klass.preprocess_xml = false
-        @klass.
+        expect(@klass).to_not receive :preprocess
         @klass.parse sample_rss_feed
       end
     end
@@ -20,7 +20,7 @@ describe Feedjira::FeedUtilities do
     context "when the flag is set" do
       it "calls the preprocessing method" do
         @klass.preprocess_xml = true
-        @klass.
+        expect(@klass).to receive(:preprocess).
           and_return sample_rss_feed
         @klass.parse sample_rss_feed
       end
@@ -30,36 +30,36 @@ describe Feedjira::FeedUtilities do
   describe "instance methods" do
     it "should provide an updated? accessor" do
       feed = @klass.new
-      feed.
+      expect(feed).to_not be_updated
       feed.updated = true
-      feed.
+      expect(feed).to be_updated
     end

     it "should provide a new_entries accessor" do
       feed = @klass.new
-      feed.new_entries.
+      expect(feed.new_entries).to eq []
       feed.new_entries = [:foo]
-      feed.new_entries.
+      expect(feed.new_entries).to eq [:foo]
     end

     it "should provide an etag accessor" do
       feed = @klass.new
       feed.etag = "foo"
-      feed.etag.
+      expect(feed.etag).to eq "foo"
     end

     it "should provide a last_modified accessor" do
       feed = @klass.new
       time = Time.now
       feed.last_modified = time
-      feed.last_modified.
-      feed.last_modified.class.
+      expect(feed.last_modified).to eq time
+      expect(feed.last_modified.class).to eq Time
     end

     it "should return new_entries? as true when entries are put into new_entries" do
       feed = @klass.new
       feed.new_entries << :foo
-      feed.
+      expect(feed).to have_new_entries
     end

     it "should return a last_modified value from the entry with the most recent published date if the last_modified date hasn't been set" do
@@ -67,7 +67,7 @@ describe Feedjira::FeedUtilities do
       entry =Feedjira::Parser::AtomEntry.new
       entry.published = Time.now.to_s
       feed.entries << entry
-      feed.last_modified.
+      expect(feed.last_modified).to eq entry.published
     end

     it "should not throw an error if one of the entries has published date of nil" do
@@ -76,7 +76,7 @@ describe Feedjira::FeedUtilities do
       entry.published = Time.now.to_s
       feed.entries << entry
       feed.entries << Feedjira::Parser::AtomEntry.new
-      feed.last_modified.
+      expect(feed.last_modified).to eq entry.published
     end
   end

@@ -96,36 +96,36 @@ describe Feedjira::FeedUtilities do
     it "should update the title if changed" do
       @updated_feed.title = "new title"
       @feed.update_from_feed(@updated_feed)
-      @feed.title.
-      @feed.
+      expect(@feed.title).to eq @updated_feed.title
+      expect(@feed).to be_updated
     end

     it "should not update the title if the same" do
       @feed.update_from_feed(@updated_feed)
-      @feed.
+      expect(@feed).to_not be_updated
     end

     it "should update the feed_url if changed" do
       @updated_feed.feed_url = "a new feed url"
       @feed.update_from_feed(@updated_feed)
-      @feed.feed_url.
-      @feed.
+      expect(@feed.feed_url).to eq @updated_feed.feed_url
+      expect(@feed).to be_updated
     end

     it "should not update the feed_url if the same" do
       @feed.update_from_feed(@updated_feed)
-      @feed.
+      expect(@feed).to_not be_updated
     end

     it "should update the url if changed" do
       @updated_feed.url = "a new url"
       @feed.update_from_feed(@updated_feed)
-      @feed.url.
+      expect(@feed.url).to eq @updated_feed.url
     end

     it "should not update the url if not changed" do
       @feed.update_from_feed(@updated_feed)
-      @feed.
+      expect(@feed).to_not be_updated
     end
   end

@@ -154,18 +154,18 @@ describe Feedjira::FeedUtilities do

     it "should update last-modified from the latest entry date" do
       @feed.update_from_feed(@updated_feed)
-      @feed.last_modified.
+      expect(@feed.last_modified).to eq @new_entry.published
     end

     it "should put new entries into new_entries" do
       @feed.update_from_feed(@updated_feed)
-      @feed.new_entries.
+      expect(@feed.new_entries).to eq [@new_entry]
     end

     it "should also put new entries into the entries collection" do
       @feed.update_from_feed(@updated_feed)
-      @feed.entries.
-      @feed.entries.
+      expect(@feed.entries).to include(@new_entry)
+      expect(@feed.entries).to include(@old_entry)
     end
   end

@@ -207,10 +207,10 @@ describe Feedjira::FeedUtilities do
     context "changing the url of an existing entry" do
       it "should not put the complete feed into new_entries" do
         @feed.update_from_feed(@updated_feed)
-        @feed.new_entries.
-        @feed.new_entries.
-        @feed.new_entries.size.
-        @feed.new_entries.size.
+        expect(@feed.new_entries).to_not include(@entry_changed_url)
+        expect(@feed.new_entries).to_not include(@old_entry)
+        expect(@feed.new_entries.size).to eq 0
+        expect(@feed.new_entries.size).to_not eq 2
       end
     end

@@ -220,10 +220,10 @@ describe Feedjira::FeedUtilities do

       it "should put the complete feed into new_entries" do
         @feed.update_from_feed(@updated_feed)
-        @feed.new_entries.
-        @feed.new_entries.
-        @feed.new_entries.size.
-        @feed.new_entries.size.
+        expect(@feed.new_entries).to include(@entry_changed_url)
+        expect(@feed.new_entries).to include(@old_entry)
+        expect(@feed.new_entries.size).to eq 2
+        expect(@feed.new_entries.size).to_not eq 0
       end
     end
   end
@@ -248,7 +248,7 @@ describe Feedjira::FeedUtilities do

     it 'finds entries with unique ids and urls' do
       feed_one.update_from_feed feed_two
-      feed_one.new_entries.
+      expect(feed_one.new_entries).to eq [entry_two]
     end

     context 'when the entries have the same id' do
@@ -256,7 +256,7 @@ describe Feedjira::FeedUtilities do

       it 'does not find a new entry' do
         feed_one.update_from_feed feed_two
-        feed_one.new_entries.
+        expect(feed_one.new_entries).to eq []
       end
     end

@@ -265,7 +265,7 @@ describe Feedjira::FeedUtilities do

       it 'does not find a new entry' do
         feed_one.update_from_feed feed_two
-        feed_one.new_entries.
+        expect(feed_one.new_entries).to eq []
       end
     end
   end
data/spec/feedjira/parser/atom_entry_spec.rb

@@ -8,79 +8,75 @@ describe Feedjira::Parser::AtomEntry do
   end

   it "should parse the title" do
-    @entry.title.
+    expect(@entry.title).to eq "AWS Job: Architect & Designer Position in Turkey"
   end

   it "should parse the url" do
-    @entry.url.
+    expect(@entry.url).to eq "http://aws.typepad.com/aws/2009/01/aws-job-architect-designer-position-in-turkey.html"
   end

   it "should parse the url even when" do
-    Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml")).entries
+    entries = Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml")).entries
+    expect(entries.first.url).to eq "http://www.innoq.com/blog/phaus/2009/07/ja.html"
   end

   it "should parse the author" do
-    @entry.author.
+    expect(@entry.author).to eq "AWS Editor"
   end

   it "should parse the content" do
-    @entry.content.
+    expect(@entry.content).to eq sample_atom_entry_content
   end

   it "should provide a summary" do
-    @entry.summary.
+    expect(@entry.summary).to eq "Late last year an entrepreneur from Turkey visited me at Amazon HQ in Seattle. We talked about his plans to use AWS as part of his new social video portal startup. I won't spill any beans before he's ready to..."
   end

   it "should parse the published date" do
-    @entry.published.
+    expect(@entry.published).to eq Time.parse_safely("Fri Jan 16 18:21:00 UTC 2009")
   end

   it "should parse the categories" do
-    @entry.categories.
+    expect(@entry.categories).to eq ['Turkey', 'Seattle']
   end

   it "should parse the updated date" do
-    @entry.updated.
+    expect(@entry.updated).to eq Time.parse_safely("Fri Jan 16 18:21:00 UTC 2009")
   end

   it "should parse the id" do
-    @entry.id.
+    expect(@entry.id).to eq "tag:typepad.com,2003:post-61484736"
   end

   it "should support each" do
-    @entry.respond_to
+    expect(@entry).to respond_to :each
   end

   it "should be able to list out all fields with each" do
     all_fields = []
-    @entry.each do |field, value|
-      all_fields << field
-    end
-    all_fields.sort == ['author', 'categories', 'content', 'id', 'published', 'summary', 'title', 'url']
-  end
-
-  it "should be able to list out all values with each" do
     title_value = ''
+
     @entry.each do |field, value|
+      all_fields << field
       title_value = value if field == 'title'
     end
-
+
+    expect(all_fields.sort).to eq ["author", "categories", "content", "entry_id", "links", "published", "summary", "title", "updated", "url"]
+    expect(title_value).to eq "AWS Job: Architect & Designer Position in Turkey"
   end

   it "should support checking if a field exists in the entry" do
-    @entry.include
+    expect(@entry).to include 'author'
+    expect(@entry).to include 'title'
   end

   it "should allow access to fields with hash syntax" do
-    @entry['title']
-    @entry['
-    @entry['author'] == @entry.author
-    @entry['author'].should == "AWS Editor"
+    expect(@entry['title']).to eq "AWS Job: Architect & Designer Position in Turkey"
+    expect(@entry['author']).to eq "AWS Editor"
   end

   it "should allow setting field values with hash syntax" do
     @entry['title'] = "Foobar"
-    @entry.title.
+    expect(@entry.title).to eq "Foobar"
   end
-
 end
data/spec/feedjira/parser/atom_feed_burner_entry_spec.rb

@@ -8,40 +8,40 @@ describe Feedjira::Parser::AtomFeedBurnerEntry do
   end

   it "should parse the title" do
-    @entry.title.
+    expect(@entry.title).to eq "Making a Ruby C library even faster"
   end

   it "should be able to fetch a url via the 'alternate' rel if no origLink exists" do
     entry = Feedjira::Parser::AtomFeedBurner.parse(File.read("#{File.dirname(__FILE__)}/../../sample_feeds/PaulDixExplainsNothingAlternate.xml")).entries.first
-    entry.url.
+    expect(entry.url).to eq 'http://feeds.feedburner.com/~r/PaulDixExplainsNothing/~3/519925023/making-a-ruby-c-library-even-faster.html'
   end

   it "should parse the url" do
-    @entry.url.
+    expect(@entry.url).to eq "http://www.pauldix.net/2009/01/making-a-ruby-c-library-even-faster.html"
   end

   it "should parse the url when there is no alternate" do
     entry = Feedjira::Parser::AtomFeedBurner.parse(File.read("#{File.dirname(__FILE__)}/../../sample_feeds/FeedBurnerUrlNoAlternate.xml")).entries.first
-    entry.url.
+    expect(entry.url).to eq 'http://example.com/QQQQ.html'
   end

   it "should parse the author" do
-    @entry.author.
+    expect(@entry.author).to eq "Paul Dix"
   end

   it "should parse the content" do
-    @entry.content.
+    expect(@entry.content).to eq sample_feedburner_atom_entry_content
   end

   it "should provide a summary" do
-    @entry.summary.
+    expect(@entry.summary).to eq "Last week I released the first version of a SAX based XML parsing library called SAX-Machine. It uses Nokogiri, which uses libxml, so it's pretty fast. However, I felt that it could be even faster. The only question was how..."
   end

   it "should parse the published date" do
-    @entry.published.
+    expect(@entry.published).to eq Time.parse_safely("Thu Jan 22 15:50:22 UTC 2009")
   end

   it "should parse the categories" do
-    @entry.categories.
+    expect(@entry.categories).to eq ['Ruby', 'Another Category']
   end
 end
data/spec/feedjira/parser/atom_feed_burner_spec.rb

@@ -3,19 +3,19 @@ require File.join(File.dirname(__FILE__), %w[.. .. spec_helper])
 describe Feedjira::Parser::AtomFeedBurner do
   describe "#will_parse?" do
     it "should return true for a feedburner atom feed" do
-      Feedjira::Parser::AtomFeedBurner.
+      expect(Feedjira::Parser::AtomFeedBurner).to be_able_to_parse(sample_feedburner_atom_feed)
     end

     it "should return false for an rdf feed" do
-      Feedjira::Parser::AtomFeedBurner.
+      expect(Feedjira::Parser::AtomFeedBurner).to_not be_able_to_parse(sample_rdf_feed)
     end

     it "should return false for a regular atom feed" do
-      Feedjira::Parser::AtomFeedBurner.
+      expect(Feedjira::Parser::AtomFeedBurner).to_not be_able_to_parse(sample_atom_feed)
     end

     it "should return false for an rss feedburner feed" do
-      Feedjira::Parser::AtomFeedBurner.
+      expect(Feedjira::Parser::AtomFeedBurner).to_not be_able_to_parse(sample_rss_feed_burner_feed)
     end
   end

@@ -25,32 +25,32 @@ describe Feedjira::Parser::AtomFeedBurner do
     end

     it "should parse the title" do
-      @feed.title.
+      expect(@feed.title).to eq "Paul Dix Explains Nothing"
     end

     it "should parse the description" do
-      @feed.description.
+      expect(@feed.description).to eq "Entrepreneurship, programming, software development, politics, NYC, and random thoughts."
     end

     it "should parse the url" do
-      @feed.url.
+      expect(@feed.url).to eq "http://www.pauldix.net/"
     end

     it "should parse the feed_url" do
-      @feed.feed_url.
+      expect(@feed.feed_url).to eq "http://feeds.feedburner.com/PaulDixExplainsNothing"
     end

     it "should parse no hub urls" do
-      @feed.hubs.count.
+      expect(@feed.hubs.count).to eq 0
     end

     it "should parse hub urls" do
       feed_with_hub = Feedjira::Parser::AtomFeedBurner.parse(load_sample("TypePadNews.xml"))
-      feed_with_hub.hubs.count.
+      expect(feed_with_hub.hubs.count).to eq 1
     end

     it "should parse entries" do
-      @feed.entries.size.
+      expect(@feed.entries.size).to eq 5
     end
   end

@@ -61,7 +61,7 @@ describe Feedjira::Parser::AtomFeedBurner do
       feed = Feedjira::Parser::AtomFeedBurner.parse sample_feed_burner_atom_xhtml_feed
       entry = feed.entries.first

-      entry.content.
+      expect(entry.content).to match /\A\<p/
     end
   end
 end
data/spec/feedjira/parser/atom_spec.rb

@@ -3,19 +3,19 @@ require File.join(File.dirname(__FILE__), %w[.. .. spec_helper])
 describe Feedjira::Parser::Atom do
   describe "#will_parse?" do
     it "should return true for an atom feed" do
-      Feedjira::Parser::Atom.
+      expect(Feedjira::Parser::Atom).to be_able_to_parse(sample_atom_feed)
     end

     it "should return false for an rdf feed" do
-      Feedjira::Parser::Atom.
+      expect(Feedjira::Parser::Atom).to_not be_able_to_parse(sample_rdf_feed)
     end

     it "should return false for an rss feedburner feed" do
-      Feedjira::Parser::Atom.
+      expect(Feedjira::Parser::Atom).to_not be_able_to_parse(sample_rss_feed_burner_feed)
     end

     it "should return true for an atom feed that has line breaks in between attributes in the <feed> node" do
-      Feedjira::Parser::Atom.
+      expect(Feedjira::Parser::Atom).to be_able_to_parse(sample_atom_feed_line_breaks)
     end
   end

@@ -25,41 +25,43 @@ describe Feedjira::Parser::Atom do
     end

     it "should parse the title" do
-      @feed.title.
+      expect(@feed.title).to eq "Amazon Web Services Blog"
     end

     it "should parse the description" do
-      @feed.description.
+      expect(@feed.description).to eq "Amazon Web Services, Products, Tools, and Developer Information..."
     end

     it "should parse the url" do
-      @feed.url.
+      expect(@feed.url).to eq "http://aws.typepad.com/aws/"
     end

     it "should parse the url even when it doesn't have the type='text/html' attribute" do
-      Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml"))
+      feed = Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml"))
+      expect(feed.url).to eq "http://www.innoq.com/planet/"
     end

     it "should parse the feed_url even when it doesn't have the type='application/atom+xml' attribute" do
-      Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml"))
+      feed = Feedjira::Parser::Atom.parse(load_sample("atom_with_link_tag_for_url_unmarked.xml"))
+      expect(feed.feed_url).to eq "http://www.innoq.com/planet/atom.xml"
     end

     it "should parse the feed_url" do
-      @feed.feed_url.
+      expect(@feed.feed_url).to eq "http://aws.typepad.com/aws/atom.xml"
     end

     it "should parse no hub urls" do
-      @feed.hubs.count.
+      expect(@feed.hubs.count).to eq 0
     end

     it "should parse the hub urls" do
       feed_with_hub = Feedjira::Parser::Atom.parse(load_sample("SamRuby.xml"))
-      feed_with_hub.hubs.count.
-      feed_with_hub.hubs.first.
+      expect(feed_with_hub.hubs.count).to eq 1
+      expect(feed_with_hub.hubs.first).to eq "http://pubsubhubbub.appspot.com/"
     end

     it "should parse entries" do
-      @feed.entries.size.
+      expect(@feed.entries.size).to eq 10
     end
   end

@@ -70,7 +72,7 @@ describe Feedjira::Parser::Atom do
       feed = Feedjira::Parser::Atom.parse sample_atom_xhtml_feed
       entry = feed.entries.first

-      entry.content.
+      expect(entry.content).to match /\A\<p/
     end

     it "should not duplicate content when there are divs in content" do
@@ -78,7 +80,7 @@ describe Feedjira::Parser::Atom do

       feed = Feedjira::Parser::Atom.parse sample_duplicate_content_atom_feed
       content = Nokogiri::HTML(feed.entries[1].content)
-      content.css('img').length.
+      expect(content.css('img').length).to eq 11
     end
   end

@@ -88,11 +90,11 @@ describe Feedjira::Parser::Atom do
     end

     it "should parse url" do
-      @feed.url.
+      expect(@feed.url).to eq "http://feedjira.com/blog"
     end

     it "should parse feed url" do
-      @feed.feed_url.
+      expect(@feed.feed_url).to eq "http://feedjira.com/blog/feed.xml"
     end
   end
 end
data/spec/feedjira/parser/google_docs_atom_entry_spec.rb

@@ -8,15 +8,15 @@ describe Feedjira::Parser::GoogleDocsAtomEntry do
     end

     it 'should have the custom checksum element' do
-      @entry.checksum.
+      expect(@entry.checksum).to eq '2b01142f7481c7b056c4b410d28f33cf'
     end

     it 'should have the custom filename element' do
-      @entry.original_filename.
+      expect(@entry.original_filename).to eq "MyFile.pdf"
     end

     it 'should have the custom suggested filename element' do
-      @entry.suggested_filename.
+      expect(@entry.suggested_filename).to eq "TaxDocument.pdf"
     end
   end
 end
data/spec/feedjira/parser/google_docs_atom_spec.rb

@@ -3,11 +3,11 @@ require File.join(File.dirname(__FILE__), %w[.. .. spec_helper])
 describe Feedjira::Parser::GoogleDocsAtom do
   describe '.able_to_parser?' do
     it 'should return true for Google Docs feed' do
-      Feedjira::Parser::GoogleDocsAtom.
+      expect(Feedjira::Parser::GoogleDocsAtom).to be_able_to_parse(sample_google_docs_list_feed)
     end

     it 'should not be able to parse another Atom feed' do
-      Feedjira::Parser::GoogleDocsAtom.
+      expect(Feedjira::Parser::GoogleDocsAtom).to_not be_able_to_parse(sample_atom_feed)
     end
   end

@@ -17,15 +17,15 @@ describe Feedjira::Parser::GoogleDocsAtom do
     end

     it 'should return a bunch of objects' do
-      @feed.entries.
+      expect(@feed.entries).to_not be_empty
     end

     it 'should populate a title, interhited from the Atom entry' do
-      @feed.title.
+      expect(@feed.title).to_not be_nil
     end

     it 'should return a bunch of entries of type GoogleDocsAtomEntry' do
-      @feed.entries.first.
+      expect(@feed.entries.first).to be_a Feedjira::Parser::GoogleDocsAtomEntry
     end
   end
 end