fandango 0.1.3 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. data/fandango.gemspec +19 -2
  2. data/lib/fandango/version.rb +1 -1
  3. data/lib/feedzirra.rb +4 -0
  4. data/lib/vendor/feedzirra/.gitignore +6 -0
  5. data/lib/vendor/feedzirra/.rspec +1 -0
  6. data/lib/vendor/feedzirra/lib/feedzirra/core_ext/date.rb +19 -0
  7. data/lib/vendor/feedzirra/lib/feedzirra/core_ext/string.rb +9 -0
  8. data/lib/vendor/feedzirra/lib/feedzirra/core_ext.rb +3 -0
  9. data/lib/vendor/feedzirra/lib/feedzirra/feed.rb +383 -0
  10. data/lib/vendor/feedzirra/lib/feedzirra/feed_entry_utilities.rb +65 -0
  11. data/lib/vendor/feedzirra/lib/feedzirra/feed_utilities.rb +72 -0
  12. data/lib/vendor/feedzirra/lib/feedzirra/parser/atom.rb +29 -0
  13. data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_entry.rb +30 -0
  14. data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner.rb +21 -0
  15. data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner_entry.rb +31 -0
  16. data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss.rb +50 -0
  17. data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_item.rb +32 -0
  18. data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_owner.rb +12 -0
  19. data/lib/vendor/feedzirra/lib/feedzirra/parser/rss.rb +22 -0
  20. data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_entry.rb +34 -0
  21. data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner.rb +22 -0
  22. data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner_entry.rb +40 -0
  23. data/lib/vendor/feedzirra/lib/feedzirra/parser.rb +17 -0
  24. data/lib/vendor/feedzirra/lib/feedzirra/version.rb +3 -0
  25. data/lib/vendor/feedzirra/lib/feedzirra.rb +19 -0
  26. metadata +127 -16
data/fandango.gemspec CHANGED
@@ -18,8 +18,25 @@ Gem::Specification.new do |s|
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]

- s.add_runtime_dependency 'activesupport', '> 3.0.8', '< 3.2.0'
- s.add_runtime_dependency 'feedzirra', '0.1.1'
+ s.add_runtime_dependency 'activesupport', '>= 3.0.8', '< 3.2.0'
+ # Feedzirra 0.1.1 lists builder ~= 2.1.2,
+ # but it works with builder >= 2.1.2.
+ # ActiveSupport 3 depends on builder 3.
+ # So we have to make a local copy of feedzirra until a newer gem is released.
+ # And we have to include dependencies manually.
+ # See https://github.com/pauldix/feedzirra/issues/77.
+ # s.add_runtime_dependency 'feedzirra', '0.1.2'
+
+ # Following dependencies copied from feedzirra's gemspec.
+ s.add_runtime_dependency 'nokogiri', ['>= 1.4.4']
+ s.add_runtime_dependency 'sax-machine', ['~> 0.1.0']
+ s.add_runtime_dependency 'curb', ['~> 0.7.15']
+ s.add_runtime_dependency 'builder', ['>= 2.1.2']
+ s.add_runtime_dependency 'activesupport', ['>= 3.0.8']
+ s.add_runtime_dependency 'loofah', ['~> 1.2.0']
+ s.add_runtime_dependency 'rdoc', ['~> 3.8']
+ s.add_runtime_dependency 'rake', ['>= 0.8.7']
+ s.add_runtime_dependency 'i18n', ['>= 0.5.0']

  s.add_development_dependency 'awesome_print'
  s.add_development_dependency 'mocha', '0.10.3'
data/lib/fandango/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Fandango
- VERSION = "0.1.3"
+ VERSION = "0.1.4"
  end
data/lib/feedzirra.rb ADDED
@@ -0,0 +1,4 @@
+ # See fandango.gemspec.
+
+ $LOAD_PATH << File.dirname(__FILE__) +'/vendor/feedzirra/lib'
+ require 'vendor/feedzirra/lib/feedzirra'
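
With this shim on fandango's load path, code that does `require 'feedzirra'` picks up the vendored copy rather than the feedzirra gem. A minimal sketch of the consuming side, assuming the fandango gem is installed (so its lib/ directory is on the load path) and using the spec fixture file shipped with the gem as input:

    require 'feedzirra'   # resolves to this shim, which adds the vendored lib to $LOAD_PATH
    xml  = File.read('spec/support/fixtures/movies_near_me_73142.rss')
    feed = Feedzirra::Feed.parse(xml)   # raises Feedzirra::NoParserAvailable if nothing matches
    puts feed.title
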
data/lib/vendor/feedzirra/.gitignore ADDED
@@ -0,0 +1,6 @@
+ .DS_Store
+ .rvm
+ TODO
+ Gemfile.lock
+ rdoc/
+ doc/
data/lib/vendor/feedzirra/.rspec ADDED
@@ -0,0 +1 @@
+ --color
data/lib/vendor/feedzirra/lib/feedzirra/core_ext/date.rb ADDED
@@ -0,0 +1,19 @@
+ # Date code pulled and adapted from:
+ # Ruby Cookbook by Lucas Carlson and Leonard Richardson
+ # Published by O'Reilly
+ # ISBN: 0-596-52369-6
+ class Date
+ def feed_utils_to_gm_time
+ feed_utils_to_time(new_offset, :gm)
+ end
+
+ def feed_utils_to_local_time
+ feed_utils_to_time(new_offset(DateTime.now.offset-offset), :local)
+ end
+
+ private
+ def feed_utils_to_time(dest, method)
+ Time.send(method, dest.year, dest.month, dest.day, dest.hour, dest.min,
+ dest.sec, dest.zone)
+ end
+ end
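
These helpers are what FeedEntryUtilities#parse_datetime (further down in this diff) uses to normalize entry timestamps to GMT. A small illustrative sketch; the timestamp string is made up:

    require 'feedzirra'   # loads the core_ext above
    require 'date'

    published = DateTime.parse('Sun, 05 Feb 2012 10:00:00 -0500')
    published.feed_utils_to_gm_time     # => a Time instance in GMT
    published.feed_utils_to_local_time  # => the same instant in the local time zone
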
data/lib/vendor/feedzirra/lib/feedzirra/core_ext/string.rb ADDED
@@ -0,0 +1,9 @@
+ class String
+ def sanitize!
+ self.replace(sanitize)
+ end
+
+ def sanitize
+ Loofah.scrub_fragment(self, :prune).to_s
+ end
+ end
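
String#sanitize pushes the fragment through Loofah's :prune scrubber, which strips unsafe markup together with its content; entries call the bang version via FeedEntryUtilities#sanitize!. A rough sketch of the effect (the HTML snippet is made up):

    require 'feedzirra'   # loads loofah and the core_ext above

    title = "On sale <b>today</b><script>alert(1)</script>"
    title.sanitize    # => "On sale <b>today</b>" -- the script element is pruned
    title.sanitize!   # same, but replaces the receiver in place
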
data/lib/vendor/feedzirra/lib/feedzirra/core_ext.rb ADDED
@@ -0,0 +1,3 @@
+ Dir["#{File.dirname(__FILE__)}/core_ext/*.rb"].sort.each do |path|
+ require "feedzirra/core_ext/#{File.basename(path, '.rb')}"
+ end
data/lib/vendor/feedzirra/lib/feedzirra/feed.rb ADDED
@@ -0,0 +1,383 @@
+ module Feedzirra
+ class NoParserAvailable < StandardError; end
+
+ class Feed
+ USER_AGENT = "feedzirra http://github.com/pauldix/feedzirra/tree/master"
+
+ # Takes a raw XML feed and attempts to parse it. If no parser is available a Feedzirra::NoParserAvailable exception is raised.
+ # You can pass a block to be called when there's an error during the parsing.
+ # === Parameters
+ # [xml<String>] The XML that you would like parsed.
+ # === Returns
+ # An instance of the determined feed type. By default a Feedzirra::Atom, Feedzirra::AtomFeedBurner, Feedzirra::RDF, or Feedzirra::RSS object.
+ # === Raises
+ # Feedzirra::NoParserAvailable : If no valid parser classes could be found for the feed.
+ def self.parse(xml, &block)
+ if parser = determine_feed_parser_for_xml(xml)
+ parser.parse(xml, block)
+ else
+ raise NoParserAvailable.new("No valid parser for XML.")
+ end
+ end
+
+ # Determines the correct parser class to use for parsing the feed.
+ #
+ # === Parameters
+ # [xml<String>] The XML that you would like determine the parser for.
+ # === Returns
+ # The class name of the parser that can handle the XML.
+ def self.determine_feed_parser_for_xml(xml)
+ start_of_doc = xml.slice(0, 2000)
+ feed_classes.detect {|klass| klass.able_to_parse?(start_of_doc)}
+ end
+
+ # Adds a new feed parsing class that will be used for parsing.
+ #
+ # === Parameters
+ # [klass<Constant>] The class/constant that you want to register.
+ # === Returns
+ # A updated array of feed parser class names.
+ def self.add_feed_class(klass)
+ feed_classes.unshift klass
+ end
+
+ # Provides a list of registered feed parsing classes.
+ #
+ # === Returns
+ # A array of class names.
+ def self.feed_classes
+ @feed_classes ||= [Feedzirra::Parser::RSSFeedBurner, Feedzirra::Parser::RSS, Feedzirra::Parser::AtomFeedBurner, Feedzirra::Parser::Atom]
+ end
+
+ # Makes all registered feeds types look for the passed in element to parse.
+ # This is actually just a call to element (a SAXMachine call) in the class.
+ #
+ # === Parameters
+ # [element_tag<String>] The element tag
+ # [options<Hash>] Valid keys are same as with SAXMachine
+ def self.add_common_feed_element(element_tag, options = {})
+ feed_classes.each do |k|
+ k.element element_tag, options
+ end
+ end
+
+ # Makes all registered feeds types look for the passed in elements to parse.
+ # This is actually just a call to elements (a SAXMachine call) in the class.
+ #
+ # === Parameters
+ # [element_tag<String>] The element tag
+ # [options<Hash>] Valid keys are same as with SAXMachine
+ def self.add_common_feed_elements(element_tag, options = {})
+ feed_classes.each do |k|
+ k.elements element_tag, options
+ end
+ end
+
+ # Makes all registered entry types look for the passed in element to parse.
+ # This is actually just a call to element (a SAXMachine call) in the class.
+ #
+ # === Parameters
+ # [element_tag<String>]
+ # [options<Hash>] Valid keys are same as with SAXMachine
+ def self.add_common_feed_entry_element(element_tag, options = {})
+ call_on_each_feed_entry :element, element_tag, options
+ end
+
+ # Makes all registered entry types look for the passed in elements to parse.
+ # This is actually just a call to element (a SAXMachine call) in the class.
+ #
+ # === Parameters
+ # [element_tag<String>]
+ # [options<Hash>] Valid keys are same as with SAXMachine
+ def self.add_common_feed_entry_elements(element_tag, options = {})
+ call_on_each_feed_entry :elements, element_tag, options
+ end
+
+ # Call a method on all feed entries classes.
+ #
+ # === Parameters
+ # [method<Symbol>] The method name
+ # [parameters<Array>] The method parameters
+ def self.call_on_each_feed_entry(method, *parameters)
+ feed_classes.each do |k|
+ # iterate on the collections defined in the sax collection
+ k.sax_config.collection_elements.each_value do |vl|
+ # vl is a list of CollectionConfig mapped to an attribute name
+ # we'll look for the one set as 'entries' and add the new element
+ vl.find_all{|v| (v.accessor == 'entries') && (v.data_class.class == Class)}.each do |v|
+ v.data_class.send(method, *parameters)
+ end
+ end
+ end
+ end
+
+ # Setup curl from options.
+ # Possible parameters:
+ # * :user_agent - overrides the default user agent.
+ # * :compress - any value to enable compression
+ # * :http_authentication - array containing http authentication parameters
+ # * :proxy_url - proxy url
+ # * :proxy_port - proxy port
+ # * :max_redirects - max number of redirections
+ # * :timeout - timeout
+ def self.setup_easy curl, options
+ curl.headers["Accept-encoding"] = 'gzip, deflate' if options.has_key?(:compress)
+ curl.headers["User-Agent"] = (options[:user_agent] || USER_AGENT)
+
+ curl.userpwd = options[:http_authentication].join(':') if options.has_key?(:http_authentication)
+ curl.proxy_url = options[:proxy_url] if options.has_key?(:proxy_url)
+ curl.proxy_port = options[:proxy_port] if options.has_key?(:proxy_port)
+ curl.max_redirects = options[:max_redirects] if options[:max_redirects]
+ curl.timeout = options[:timeout] if options[:timeout]
+
+ curl.follow_location = true
+ end
+
+ # Fetches and returns the raw XML for each URL provided.
+ #
+ # === Parameters
+ # [urls<String> or <Array>] A single feed URL, or an array of feed URLs.
+ # [options<Hash>] Valid keys for this argument as as followed:
+ # :if_modified_since - Time object representing when the feed was last updated.
+ # :if_none_match - String that's normally an etag for the request that was stored previously.
+ # :on_success - Block that gets executed after a successful request.
+ # :on_failure - Block that gets executed after a failed request.
+ # * all parameters defined in setup_easy
+ # === Returns
+ # A String of XML if a single URL is passed.
+ #
+ # A Hash if multiple URL's are passed. The key will be the URL, and the value the XML.
+ def self.fetch_raw(urls, options = {})
+ url_queue = [*urls]
+ multi = Curl::Multi.new
+ responses = {}
+ url_queue.each do |url|
+ easy = Curl::Easy.new(url) do |curl|
+ setup_easy curl, options
+
+ curl.headers["If-Modified-Since"] = options[:if_modified_since].httpdate if options.has_key?(:if_modified_since)
+ curl.headers["If-None-Match"] = options[:if_none_match] if options.has_key?(:if_none_match)
+
+ curl.on_success do |c|
+ responses[url] = decode_content(c)
+ end
+ curl.on_failure do |c, err|
+ responses[url] = c.response_code
+ end
+ end
+ multi.add(easy)
+ end
+
+ multi.perform
+ urls.is_a?(String) ? responses.values.first : responses
+ end
+
+ # Fetches and returns the parsed XML for each URL provided.
+ #
+ # === Parameters
+ # [urls<String> or <Array>] A single feed URL, or an array of feed URLs.
+ # [options<Hash>] Valid keys for this argument as as followed:
+ # * :user_agent - String that overrides the default user agent.
+ # * :if_modified_since - Time object representing when the feed was last updated.
+ # * :if_none_match - String, an etag for the request that was stored previously.
+ # * :on_success - Block that gets executed after a successful request.
+ # * :on_failure - Block that gets executed after a failed request.
+ # === Returns
+ # A Feed object if a single URL is passed.
+ #
+ # A Hash if multiple URL's are passed. The key will be the URL, and the value the Feed object.
+ def self.fetch_and_parse(urls, options = {})
+ url_queue = [*urls]
+ multi = Curl::Multi.new
+ responses = {}
+
+ # I broke these down so I would only try to do 30 simultaneously because
+ # I was getting weird errors when doing a lot. As one finishes it pops another off the queue.
+ url_queue.slice!(0, 30).each do |url|
+ add_url_to_multi(multi, url, url_queue, responses, options)
+ end
+
+ multi.perform
+ return urls.is_a?(String) ? responses.values.first : responses
+ end
+
+ # Decodes the XML document if it was compressed.
+ #
+ # === Parameters
+ # [curl_request<Curl::Easy>] The Curl::Easy response object from the request.
+ # === Returns
+ # A decoded string of XML.
+ def self.decode_content(c)
+ if c.header_str.match(/Content-Encoding: gzip/i)
+ begin
+ gz = Zlib::GzipReader.new(StringIO.new(c.body_str))
+ xml = gz.read
+ gz.close
+ rescue Zlib::GzipFile::Error
+ # Maybe this is not gzipped?
+ xml = c.body_str
+ end
+ elsif c.header_str.match(/Content-Encoding: deflate/i)
+ xml = Zlib::Inflate.inflate(c.body_str)
+ else
+ xml = c.body_str
+ end
+
+ xml
+ end
+
+ # Updates each feed for each Feed object provided.
+ #
+ # === Parameters
+ # [feeds<Feed> or <Array>] A single feed object, or an array of feed objects.
+ # [options<Hash>] Valid keys for this argument as as followed:
+ # * :on_success - Block that gets executed after a successful request.
+ # * :on_failure - Block that gets executed after a failed request.
+ # * all parameters defined in setup_easy
+ # === Returns
+ # A updated Feed object if a single URL is passed.
+ #
+ # A Hash if multiple Feeds are passed. The key will be the URL, and the value the updated Feed object.
+ def self.update(feeds, options = {})
+ feed_queue = [*feeds]
+ multi = Curl::Multi.new
+ responses = {}
+
+ feed_queue.slice!(0, 30).each do |feed|
+ add_feed_to_multi(multi, feed, feed_queue, responses, options)
+ end
+
+ multi.perform
+ responses.is_a?(Array)? responses.values : responses.values.first
+ end
+
+ # An abstraction for adding a feed by URL to the passed Curb::multi stack.
+ #
+ # === Parameters
+ # [multi<Curl::Multi>] The Curl::Multi object that the request should be added too.
+ # [url<String>] The URL of the feed that you would like to be fetched.
+ # [url_queue<Array>] An array of URLs that are queued for request.
+ # [responses<Hash>] Existing responses that you want the response from the request added to.
+ # [feeds<String> or <Array>] A single feed object, or an array of feed objects.
+ # [options<Hash>] Valid keys for this argument as as followed:
+ # * :on_success - Block that gets executed after a successful request.
+ # * :on_failure - Block that gets executed after a failed request.
+ # * all parameters defined in setup_easy
+ # === Returns
+ # The updated Curl::Multi object with the request details added to it's stack.
+ def self.add_url_to_multi(multi, url, url_queue, responses, options)
+ easy = Curl::Easy.new(url) do |curl|
+ setup_easy curl, options
+ curl.headers["If-Modified-Since"] = options[:if_modified_since].httpdate if options.has_key?(:if_modified_since)
+ curl.headers["If-None-Match"] = options[:if_none_match] if options.has_key?(:if_none_match)
+
+ curl.on_success do |c|
+ add_url_to_multi(multi, url_queue.shift, url_queue, responses, options) unless url_queue.empty?
+ xml = decode_content(c)
+ klass = determine_feed_parser_for_xml(xml)
+
+ if klass
+ begin
+ feed = klass.parse(xml, Proc.new{|message| puts "Error while parsing [#{url}] #{message}" })
+ feed.feed_url = c.last_effective_url
+ feed.etag = etag_from_header(c.header_str)
+ feed.last_modified = last_modified_from_header(c.header_str)
+ responses[url] = feed
+ options[:on_success].call(url, feed) if options.has_key?(:on_success)
+ rescue Exception => e
+ options[:on_failure].call(url, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
+ end
+ else
+ # puts "Error determining parser for #{url} - #{c.last_effective_url}"
+ # raise NoParserAvailable.new("no valid parser for content.") (this would unfortunately fail the whole 'multi', so it's not really usable)
+ options[:on_failure].call(url, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
+ end
+ end
+
+ curl.on_failure do |c, err|
+ add_url_to_multi(multi, url_queue.shift, url_queue, responses, options) unless url_queue.empty?
+ responses[url] = c.response_code
+ if c.response_code == 304 # it's not modified. this isn't an error condition
+ options[:on_success].call(url, nil) if options.has_key?(:on_success)
+ else
+ options[:on_failure].call(url, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
+ end
+ end
+ end
+ multi.add(easy)
+ end
+
+ # An abstraction for adding a feed by a Feed object to the passed Curb::multi stack.
+ #
+ # === Parameters
+ # [multi<Curl::Multi>] The Curl::Multi object that the request should be added too.
+ # [feed<Feed>] A feed object that you would like to be fetched.
+ # [url_queue<Array>] An array of feed objects that are queued for request.
+ # [responses<Hash>] Existing responses that you want the response from the request added to.
+ # [feeds<String>] or <Array> A single feed object, or an array of feed objects.
+ # [options<Hash>] Valid keys for this argument as as followed:
+ # * :on_success - Block that gets executed after a successful request.
+ # * :on_failure - Block that gets executed after a failed request.
+ # * all parameters defined in setup_easy
+ # === Returns
+ # The updated Curl::Multi object with the request details added to it's stack.
+ def self.add_feed_to_multi(multi, feed, feed_queue, responses, options)
+ easy = Curl::Easy.new(feed.feed_url) do |curl|
+ setup_easy curl, options
+ curl.headers["If-Modified-Since"] = feed.last_modified.httpdate if feed.last_modified
+ curl.headers["If-Modified-Since"] = options[:if_modified_since] if options[:if_modified_since] && (!feed.last_modified || (Time.parse(options[:if_modified_since].to_s) > feed.last_modified))
+ curl.headers["If-None-Match"] = feed.etag if feed.etag
+
+ curl.on_success do |c|
+ begin
+ add_feed_to_multi(multi, feed_queue.shift, feed_queue, responses, options) unless feed_queue.empty?
+ updated_feed = Feed.parse(c.body_str){ |message| puts "Error while parsing [#{feed.feed_url}] #{message}" }
+ updated_feed.feed_url = c.last_effective_url
+ updated_feed.etag = etag_from_header(c.header_str)
+ updated_feed.last_modified = last_modified_from_header(c.header_str)
+ feed.update_from_feed(updated_feed)
+ responses[feed.feed_url] = feed
+ options[:on_success].call(feed) if options.has_key?(:on_success)
+ rescue Exception => e
+ options[:on_failure].call(feed, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
+ end
+ end
+
+ curl.on_failure do |c, err|
+ add_feed_to_multi(multi, feed_queue.shift, feed_queue, responses, options) unless feed_queue.empty?
+ response_code = c.response_code
+ if response_code == 304 # it's not modified. this isn't an error condition
+ responses[feed.feed_url] = feed
+ options[:on_success].call(feed) if options.has_key?(:on_success)
+ else
+ responses[feed.url] = c.response_code
+ options[:on_failure].call(feed, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
+ end
+ end
+ end
+ multi.add(easy)
+ end
+
+ # Determines the etag from the request headers.
+ #
+ # === Parameters
+ # [header<String>] Raw request header returned from the request
+ # === Returns
+ # A string of the etag or nil if it cannot be found in the headers.
+ def self.etag_from_header(header)
+ header =~ /.*ETag:\s(.*)\r/
+ $1
+ end
+
+ # Determines the last modified date from the request headers.
+ #
+ # === Parameters
+ # [header<String>] Raw request header returned from the request
+ # === Returns
+ # A Time object of the last modified date or nil if it cannot be found in the headers.
+ def self.last_modified_from_header(header)
+ header =~ /.*Last-Modified:\s(.*)\r/
+ Time.parse($1) if $1
+ end
+ end
+ end
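
The rdoc above covers the main entry points of the vendored copy (parse, fetch_raw, fetch_and_parse, update). A brief usage sketch; the feed URL and callbacks are hypothetical:

    require 'feedzirra'

    feed = Feedzirra::Feed.fetch_and_parse('http://example.com/movies.rss',
      :timeout    => 10,
      :on_success => lambda { |url, parsed| puts "parsed #{url}: #{parsed.title}" },
      :on_failure => lambda { |url, code, headers, body| warn "#{url} failed (#{code})" })

    # on failure the returned value is the HTTP response code rather than a feed
    feed.entries.each { |entry| puts entry.title } if feed.respond_to?(:entries)
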
data/lib/vendor/feedzirra/lib/feedzirra/feed_entry_utilities.rb ADDED
@@ -0,0 +1,65 @@
+ module Feedzirra
+ module FeedEntryUtilities
+
+ include Enumerable
+
+ def published
+ @published ||= @updated
+ end
+
+ def parse_datetime(string)
+ begin
+ DateTime.parse(string).feed_utils_to_gm_time
+ rescue
+ puts "DATE CAN'T BE PARSED: [#{string}]"
+ nil
+ end
+ end
+
+ ##
+ # Returns the id of the entry or its url if not id is present, as some formats don't support it
+ def id
+ @entry_id ||= @url
+ end
+
+ ##
+ # Writer for published. By default, we keep the "oldest" publish time found.
+ def published=(val)
+ parsed = parse_datetime(val)
+ @published = parsed if !@published || parsed < @published
+ end
+
+ ##
+ # Writer for updated. By default, we keep the most recent update time found.
+ def updated=(val)
+ parsed = parse_datetime(val)
+ @updated = parsed if !@updated || parsed > @updated
+ end
+
+ def sanitize!
+ self.title.sanitize! if self.title
+ self.author.sanitize! if self.author
+ self.summary.sanitize! if self.summary
+ self.content.sanitize! if self.content
+ end
+
+ alias_method :last_modified, :published
+
+ def each
+ @rss_fields ||= self.instance_variables
+
+ @rss_fields.each do |field|
+ yield(field.to_s.sub('@', ''), self.instance_variable_get(field))
+ end
+ end
+
+ def [](field)
+ self.instance_variable_get("@#{field.to_s}")
+ end
+
+ def []=(field, value)
+ self.instance_variable_set("@#{field.to_s}", value)
+ end
+
+ end
+ end
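
FeedEntryUtilities is mixed into every entry class below, so parsed entries expose normalized timestamps, fall-back ids, hash-style field access and Loofah scrubbing. A small sketch, assuming `feed` is a feed object obtained as in the examples above:

    entry = feed.entries.first
    entry.published            # Time in GMT, via the Date core_ext
    entry.id                   # falls back to the entry url when no id was present
    entry['title']             # same as entry.title, read straight from @title
    entry.each { |field, value| puts "#{field}: #{value}" }
    entry.sanitize!            # scrubs title, author, summary and content
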
data/lib/vendor/feedzirra/lib/feedzirra/feed_utilities.rb ADDED
@@ -0,0 +1,72 @@
+ module Feedzirra
+ module FeedUtilities
+ UPDATABLE_ATTRIBUTES = %w(title feed_url url last_modified etag)
+
+ attr_writer :new_entries, :updated, :last_modified
+ attr_accessor :etag
+
+ def last_modified
+ @last_modified ||= begin
+ entry = entries.reject {|e| e.published.nil? }.sort_by { |entry| entry.published if entry.published }.last
+ entry ? entry.published : nil
+ end
+ end
+
+ def updated?
+ @updated
+ end
+
+ def new_entries
+ @new_entries ||= []
+ end
+
+ def has_new_entries?
+ new_entries.size > 0
+ end
+
+ def update_from_feed(feed)
+ self.new_entries += find_new_entries_for(feed)
+ self.entries.unshift(*self.new_entries)
+
+ @updated = false
+ UPDATABLE_ATTRIBUTES.each do |name|
+ updated = update_attribute(feed, name)
+ @updated ||= updated
+ end
+ end
+
+ def update_attribute(feed, name)
+ old_value, new_value = send(name), feed.send(name)
+
+ if old_value != new_value
+ send("#{name}=", new_value)
+ end
+ end
+
+ def sanitize_entries!
+ entries.each {|entry| entry.sanitize!}
+ end
+
+ private
+
+ def find_new_entries_for(feed)
+ # this implementation is a hack, which is why it's so ugly.
+ # it's to get around the fact that not all feeds have a published date.
+ # however, they're always ordered with the newest one first.
+ # So we go through the entries just parsed and insert each one as a new entry
+ # until we get to one that has the same url as the the newest for the feed
+ return feed.entries if self.entries.length == 0
+ latest_entry = self.entries.first
+ found_new_entries = []
+ feed.entries.each do |entry|
+ break if entry.url == latest_entry.url
+ found_new_entries << entry
+ end
+ found_new_entries
+ end
+
+ def existing_entry?(test_entry)
+ entries.any? { |entry| entry.url == test_entry.url }
+ end
+ end
+ end
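
FeedUtilities carries the update bookkeeping that Feed.update relies on: conditional fetches keyed off the stored etag and last_modified, and update_from_feed to merge whatever came back. A sketch of that round trip; the URL is hypothetical:

    feed = Feedzirra::Feed.fetch_and_parse('http://example.com/movies.rss')

    # later: re-fetch using If-None-Match / If-Modified-Since taken from the feed object
    updated = Feedzirra::Feed.update(feed)
    if updated.has_new_entries?
      updated.new_entries.each { |entry| puts "new: #{entry.title}" }
    end
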
data/lib/vendor/feedzirra/lib/feedzirra/parser/atom.rb ADDED
@@ -0,0 +1,29 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with Atom feeds.
+ class Atom
+ include SAXMachine
+ include FeedUtilities
+ element :title
+ element :subtitle, :as => :description
+ element :link, :as => :url, :value => :href, :with => {:type => "text/html"}
+ element :link, :as => :feed_url, :value => :href, :with => {:type => "application/atom+xml"}
+ elements :link, :as => :links, :value => :href
+ elements :entry, :as => :entries, :class => AtomEntry
+
+ def self.able_to_parse?(xml) #:nodoc:
+ /\<feed[^\>]+xmlns=[\"|\'](http:\/\/www\.w3\.org\/2005\/Atom|http:\/\/purl\.org\/atom\/ns\#)[\"|\'][^\>]*\>/ =~ xml
+ end
+
+ def url
+ @url || links.last
+ end
+
+ def feed_url
+ @feed_url ||= links.first
+ end
+ end
+ end
+
+ end
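
Each parser declares its field mapping with SAXMachine and advertises which documents it handles through able_to_parse?; Feed.determine_feed_parser_for_xml simply asks the registered classes in order. A hypothetical custom parser (class name and sniffing rule are invented) could be registered ahead of the defaults like this:

    class MyAtomVariant
      include SAXMachine
      include Feedzirra::FeedUtilities

      element  :title
      elements :entry, :as => :entries, :class => Feedzirra::Parser::AtomEntry

      def self.able_to_parse?(xml)
        xml.include?('xmlns:my-ext')   # invented namespace check
      end
    end

    Feedzirra::Feed.add_feed_class(MyAtomVariant)   # unshifted, so it is consulted first
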
data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_entry.rb ADDED
@@ -0,0 +1,30 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with Atom feed entries.
+ class AtomEntry
+ include SAXMachine
+ include FeedEntryUtilities
+
+ element :title
+ element :link, :as => :url, :value => :href, :with => {:type => "text/html", :rel => "alternate"}
+ element :name, :as => :author
+ element :content
+ element :summary
+ element :published
+ element :id, :as => :entry_id
+ element :created, :as => :published
+ element :issued, :as => :published
+ element :updated
+ element :modified, :as => :updated
+ elements :category, :as => :categories, :value => :term
+ elements :link, :as => :links, :value => :href
+
+ def url
+ @url ||= links.first
+ end
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner.rb ADDED
@@ -0,0 +1,21 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with Feedburner Atom feeds.
+ class AtomFeedBurner
+ include SAXMachine
+ include FeedUtilities
+ element :title
+ element :subtitle, :as => :description
+ element :link, :as => :url, :value => :href, :with => {:type => "text/html"}
+ element :link, :as => :feed_url, :value => :href, :with => {:type => "application/atom+xml"}
+ elements :entry, :as => :entries, :class => AtomFeedBurnerEntry
+
+ def self.able_to_parse?(xml) #:nodoc:
+ ((/Atom/ =~ xml) && (/feedburner/ =~ xml) && !(/\<rss|\<rdf/ =~ xml)) || false
+ end
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner_entry.rb ADDED
@@ -0,0 +1,31 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with Feedburner Atom feed entries.
+ class AtomFeedBurnerEntry
+ include SAXMachine
+ include FeedEntryUtilities
+
+ element :title
+ element :name, :as => :author
+ element :link, :as => :url, :value => :href, :with => {:type => "text/html", :rel => "alternate"}
+ element :"feedburner:origLink", :as => :url
+ element :summary
+ element :content
+ element :published
+ element :id, :as => :entry_id
+ element :issued, :as => :published
+ element :created, :as => :published
+ element :updated
+ element :modified, :as => :updated
+ elements :category, :as => :categories, :value => :term
+ elements :link, :as => :links, :value => :href
+
+ def url
+ @url ||= links.first
+ end
+
+ end
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss.rb ADDED
@@ -0,0 +1,50 @@
+ module Feedzirra
+
+ module Parser
+ # iTunes is RSS 2.0 + some apple extensions
+ # Source: http://www.apple.com/itunes/whatson/podcasts/specs.html
+ class ITunesRSS
+ include SAXMachine
+ include FeedUtilities
+
+ attr_accessor :feed_url
+
+ # RSS 2.0 elements that need including
+ element :copyright
+ element :description
+ element :language
+ element :managingEditor
+ element :title
+ element :link, :as => :url
+
+ # If author is not present use managingEditor on the channel
+ element :"itunes:author", :as => :itunes_author
+ element :"itunes:block", :as => :itunes_block
+ element :"itunes:image", :value => :href, :as => :itunes_image
+ element :"itunes:explicit", :as => :itunes_explicit
+ element :"itunes:keywords", :as => :itunes_keywords
+ # New URL for the podcast feed
+ element :"itunes:new-feed-url", :as => :itunes_new_feed_url
+ element :"itunes:subtitle", :as => :itunes_subtitle
+ # If summary is not present, use the description tag
+ element :"itunes:summary", :as => :itunes_summary
+
+ # iTunes RSS feeds can have multiple main categories...
+ # ...and multiple sub-categories per category
+ # TODO subcategories not supported correctly - they are at the same level
+ # as the main categories
+ elements :"itunes:category", :as => :itunes_categories, :value => :text
+
+ elements :"itunes:owner", :as => :itunes_owners, :class => ITunesRSSOwner
+
+ elements :item, :as => :entries, :class => ITunesRSSItem
+
+ def self.able_to_parse?(xml)
+ /xmlns:itunes=\"http:\/\/www.itunes.com\/dtds\/podcast-1.0.dtd\"/i =~ xml
+ end
+
+ end
+
+ end
+
+ end
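
ITunesRSS is not in Feed.feed_classes by default (see feed.rb above), so a podcast feed would normally be handled by the plain RSS parser; registering it first makes the itunes_* accessors available. A sketch, with podcast_xml standing in for a String of podcast feed XML:

    Feedzirra::Feed.add_feed_class(Feedzirra::Parser::ITunesRSS)

    feed = Feedzirra::Feed.parse(podcast_xml)
    feed.itunes_author
    feed.itunes_categories
    feed.entries.first.itunes_duration
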
data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_item.rb ADDED
@@ -0,0 +1,32 @@
+ module Feedzirra
+
+ module Parser
+ # iTunes extensions to the standard RSS2.0 item
+ # Source: http://www.apple.com/itunes/whatson/podcasts/specs.html
+ class ITunesRSSItem
+ include SAXMachine
+ include FeedEntryUtilities
+
+ element :author
+ element :guid
+ element :title
+ element :link, :as => :url
+ element :description, :as => :summary
+ element :pubDate, :as => :published
+
+ # If author is not present use author tag on the item
+ element :"itunes:author", :as => :itunes_author
+ element :"itunes:block", :as => :itunes_block
+ element :"itunes:duration", :as => :itunes_duration
+ element :"itunes:explicit", :as => :itunes_explicit
+ element :"itunes:keywords", :as => :itunes_keywords
+ element :"itunes:subtitle", :as => :itunes_subtitle
+ # If summary is not present, use the description tag
+ element :"itunes:summary", :as => :itunes_summary
+ element :enclosure, :value => :length, :as => :enclosure_length
+ element :enclosure, :value => :type, :as => :enclosure_type
+ element :enclosure, :value => :url, :as => :enclosure_url
+ end
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_owner.rb ADDED
@@ -0,0 +1,12 @@
+ module Feedzirra
+
+ module Parser
+ class ITunesRSSOwner
+ include SAXMachine
+ include FeedUtilities
+ element :"itunes:name", :as => :name
+ element :"itunes:email", :as => :email
+ end
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/rss.rb ADDED
@@ -0,0 +1,22 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with RSS feeds.
+ class RSS
+ include SAXMachine
+ include FeedUtilities
+ element :title
+ element :description
+ element :link, :as => :url
+ elements :item, :as => :entries, :class => RSSEntry
+
+ attr_accessor :feed_url
+
+ def self.able_to_parse?(xml) #:nodoc:
+ (/\<rss|\<rdf/ =~ xml) && !(/feedburner/ =~ xml)
+ end
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_entry.rb ADDED
@@ -0,0 +1,34 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with RDF feed entries.
+ class RSSEntry
+ include SAXMachine
+ include FeedEntryUtilities
+
+ element :title
+ element :link, :as => :url
+
+ element :"dc:creator", :as => :author
+ element :author, :as => :author
+ element :"content:encoded", :as => :content
+ element :description, :as => :summary
+
+ element :pubDate, :as => :published
+ element :pubdate, :as => :published
+ element :"dc:date", :as => :published
+ element :"dc:Date", :as => :published
+ element :"dcterms:created", :as => :published
+
+
+ element :"dcterms:modified", :as => :updated
+ element :issued, :as => :published
+ elements :category, :as => :categories
+
+ element :guid, :as => :entry_id
+
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner.rb ADDED
@@ -0,0 +1,22 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with RSS feeds.
+ class RSSFeedBurner
+ include SAXMachine
+ include FeedUtilities
+ element :title
+ element :description
+ element :link, :as => :url
+ elements :item, :as => :entries, :class => RSSFeedBurnerEntry
+
+ attr_accessor :feed_url
+
+ def self.able_to_parse?(xml) #:nodoc:
+ (/\<rss|\<rdf/ =~ xml) && (/feedburner/ =~ xml)
+ end
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner_entry.rb ADDED
@@ -0,0 +1,40 @@
+ module Feedzirra
+
+ module Parser
+ # Parser for dealing with RDF feed entries.
+ class RSSFeedBurnerEntry
+ include SAXMachine
+ include FeedEntryUtilities
+
+ element :title
+
+ element :"feedburner:origLink", :as => :url
+ element :link, :as => :url
+
+ element :"dc:creator", :as => :author
+ element :author, :as => :author
+ element :"content:encoded", :as => :content
+ element :description, :as => :summary
+
+ element :pubDate, :as => :published
+ element :pubdate, :as => :published
+ element :"dc:date", :as => :published
+ element :"dc:Date", :as => :published
+ element :"dcterms:created", :as => :published
+
+
+ element :"dcterms:modified", :as => :updated
+ element :issued, :as => :published
+ elements :category, :as => :categories
+
+ element :guid, :as => :entry_id
+
+ def url
+ @url || @link
+ end
+
+ end
+
+ end
+
+ end
data/lib/vendor/feedzirra/lib/feedzirra/parser.rb ADDED
@@ -0,0 +1,17 @@
+ module Feedzirra
+ module Parser
+ autoload :RSS, 'feedzirra/parser/rss'
+ autoload :RSSEntry, 'feedzirra/parser/rss_entry'
+ autoload :RSSFeedBurner, 'feedzirra/parser/rss_feed_burner'
+ autoload :RSSFeedBurnerEntry, 'feedzirra/parser/rss_feed_burner_entry'
+
+ autoload :ITunesRSS, 'feedzirra/parser/itunes_rss'
+ autoload :ITunesRSSItem, 'feedzirra/parser/itunes_rss_item'
+ autoload :ITunesRSSOwner, 'feedzirra/parser/itunes_rss_owner'
+
+ autoload :Atom, 'feedzirra/parser/atom'
+ autoload :AtomEntry, 'feedzirra/parser/atom_entry'
+ autoload :AtomFeedBurner, 'feedzirra/parser/atom_feed_burner'
+ autoload :AtomFeedBurnerEntry, 'feedzirra/parser/atom_feed_burner_entry'
+ end
+ end
data/lib/vendor/feedzirra/lib/feedzirra/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Feedzirra
+ VERSION = '0.1.1'
+ end
data/lib/vendor/feedzirra/lib/feedzirra.rb ADDED
@@ -0,0 +1,19 @@
+ require 'zlib'
+ require 'curb'
+ require 'sax-machine'
+ require 'loofah'
+ require 'uri'
+
+ require 'active_support/basic_object'
+ require 'active_support/core_ext/module'
+ require 'active_support/core_ext/object'
+ require 'active_support/time'
+
+ require 'feedzirra/core_ext'
+
+ module Feedzirra
+ autoload :FeedEntryUtilities, 'feedzirra/feed_entry_utilities'
+ autoload :FeedUtilities, 'feedzirra/feed_utilities'
+ autoload :Feed, 'feedzirra/feed'
+ autoload :Parser, 'feedzirra/parser'
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fandango
  version: !ruby/object:Gem::Version
- version: 0.1.3
+ version: 0.1.4
  prerelease:
  platform: ruby
  authors:
@@ -9,14 +9,14 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-02-03 00:00:00.000000000Z
+ date: 2012-02-05 00:00:00.000000000Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: activesupport
- requirement: &70350391829420 !ruby/object:Gem::Requirement
+ requirement: &70168998909880 !ruby/object:Gem::Requirement
  none: false
  requirements:
- - - ! '>'
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: 3.0.8
  - - <
@@ -24,21 +24,109 @@ dependencies:
  version: 3.2.0
  type: :runtime
  prerelease: false
- version_requirements: *70350391829420
+ version_requirements: *70168998909880
  - !ruby/object:Gem::Dependency
- name: feedzirra
- requirement: &70350391828440 !ruby/object:Gem::Requirement
+ name: nokogiri
+ requirement: &70168998909100 !ruby/object:Gem::Requirement
  none: false
  requirements:
- - - =
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: 1.4.4
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998909100
+ - !ruby/object:Gem::Dependency
+ name: sax-machine
+ requirement: &70168998908620 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: 0.1.0
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998908620
+ - !ruby/object:Gem::Dependency
+ name: curb
+ requirement: &70168998908140 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: 0.7.15
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998908140
+ - !ruby/object:Gem::Dependency
+ name: builder
+ requirement: &70168998907660 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: 2.1.2
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998907660
+ - !ruby/object:Gem::Dependency
+ name: activesupport
+ requirement: &70168998907180 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: 3.0.8
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998907180
+ - !ruby/object:Gem::Dependency
+ name: loofah
+ requirement: &70168998906700 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: 1.2.0
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998906700
+ - !ruby/object:Gem::Dependency
+ name: rdoc
+ requirement: &70168998906200 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: '3.8'
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998906200
+ - !ruby/object:Gem::Dependency
+ name: rake
+ requirement: &70168998905720 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: 0.8.7
+ type: :runtime
+ prerelease: false
+ version_requirements: *70168998905720
+ - !ruby/object:Gem::Dependency
+ name: i18n
+ requirement: &70168998905180 !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
  - !ruby/object:Gem::Version
- version: 0.1.1
+ version: 0.5.0
  type: :runtime
  prerelease: false
- version_requirements: *70350391828440
+ version_requirements: *70168998905180
  - !ruby/object:Gem::Dependency
  name: awesome_print
- requirement: &70350391828060 !ruby/object:Gem::Requirement
+ requirement: &70168998904740 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
@@ -46,10 +134,10 @@ dependencies:
  version: '0'
  type: :development
  prerelease: false
- version_requirements: *70350391828060
+ version_requirements: *70168998904740
  - !ruby/object:Gem::Dependency
  name: mocha
- requirement: &70350391827400 !ruby/object:Gem::Requirement
+ requirement: &70168998904200 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - =
@@ -57,10 +145,10 @@ dependencies:
  version: 0.10.3
  type: :development
  prerelease: false
- version_requirements: *70350391827400
+ version_requirements: *70168998904200
  - !ruby/object:Gem::Dependency
  name: minitest
- requirement: &70350391826840 !ruby/object:Gem::Requirement
+ requirement: &70168998903660 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - =
@@ -68,7 +156,7 @@ dependencies:
  version: 2.11.1
  type: :development
  prerelease: false
- version_requirements: *70350391826840
+ version_requirements: *70168998903660
  description: Find theaters and movies on sale near a given postal code
  email:
  - jared@redningja.com
@@ -89,6 +177,29 @@ files:
  - lib/fandango/parsers/movie.rb
  - lib/fandango/parsers/theater.rb
  - lib/fandango/version.rb
+ - lib/feedzirra.rb
+ - lib/vendor/feedzirra/.gitignore
+ - lib/vendor/feedzirra/.rspec
+ - lib/vendor/feedzirra/lib/feedzirra.rb
+ - lib/vendor/feedzirra/lib/feedzirra/core_ext.rb
+ - lib/vendor/feedzirra/lib/feedzirra/core_ext/date.rb
+ - lib/vendor/feedzirra/lib/feedzirra/core_ext/string.rb
+ - lib/vendor/feedzirra/lib/feedzirra/feed.rb
+ - lib/vendor/feedzirra/lib/feedzirra/feed_entry_utilities.rb
+ - lib/vendor/feedzirra/lib/feedzirra/feed_utilities.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/atom.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/atom_entry.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/atom_feed_burner_entry.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_item.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/itunes_rss_owner.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/rss.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/rss_entry.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner.rb
+ - lib/vendor/feedzirra/lib/feedzirra/parser/rss_feed_burner_entry.rb
+ - lib/vendor/feedzirra/lib/feedzirra/version.rb
  - spec/fandango.spec.rb
  - spec/spec_helper.rb
  - spec/support/fixtures/movies_near_me_73142.rss