Class: Feedzirra::Feed
- Inherits:
-
Object
- Object
- Feedzirra::Feed
- Defined in:
- lib/feedzirra/feed.rb
Constant Summary collapse
- USER_AGENT =
"feedzirra http://github.com/pauldix/feedzirra/tree/master"
Class Method Summary collapse
- .add_feed_class(klass) ⇒ Object
- .add_feed_to_multi(multi, feed, feed_queue, responses, options) ⇒ Object
- .add_url_to_multi(multi, url, url_queue, responses, options) ⇒ Object
- .decode_content(c) ⇒ Object
- .determine_feed_parser_for_xml(xml) ⇒ Object
- .etag_from_header(header) ⇒ Object
- .feed_classes ⇒ Object
- .fetch_and_parse(urls, options = {}) ⇒ Object
-
.fetch_raw(urls, options = {}) ⇒ Object
Can take a single URL or an array of URLs. When passed a single URL it returns the body of the response; when passed an array of URLs it returns a hash with the URLs as keys and the bodies of the responses as values.
- .last_modified_from_header(header) ⇒ Object
- .parse(xml) ⇒ Object
- .update(feeds, options = {}) ⇒ Object
Class Method Details
.add_feed_class(klass) ⇒ Object
20 21 22 |
# File 'lib/feedzirra/feed.rb', line 20

# Registers a user-supplied feed class. It is prepended to the parser list,
# so custom classes take priority over the built-in ones.
def self.add_feed_class(klass)
  feed_classes.unshift(klass)
end
.add_feed_to_multi(multi, feed, feed_queue, responses, options) ⇒ Object
128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 |
# File 'lib/feedzirra/feed.rb', line 128

# Adds one +feed+ object to the curl +multi+ handle for a conditional
# re-fetch (If-Modified-Since / If-None-Match built from the feed's saved
# +last_modified+ / +etag+). When a request finishes, the next feed is
# shifted off +feed_queue+ so only a bounded number run concurrently.
#
# responses - Hash keyed by feed_url; receives the updated feed object on
#             success (including 304 Not Modified) or the numeric response
#             code on failure.
# options   - may contain :user_agent plus :on_success / :on_failure
#             callbacks.
#
# NOTE(review): the extraction of this page had stripped every occurrence of
# the +options+ identifier; it is restored here.
def self.add_feed_to_multi(multi, feed, feed_queue, responses, options)
  easy = Curl::Easy.new(feed.feed_url) do |curl|
    curl.headers["User-Agent"]        = (options[:user_agent] || USER_AGENT)
    curl.headers["If-Modified-Since"] = feed.last_modified.httpdate if feed.last_modified
    curl.headers["If-None-Match"]     = feed.etag if feed.etag
    curl.follow_location = true

    curl.on_success do |c|
      # Keep the pipeline full: queue the next feed before processing this one.
      add_feed_to_multi(multi, feed_queue.shift, feed_queue, responses, options) unless feed_queue.empty?
      updated_feed = Feed.parse(c.body_str)
      updated_feed.feed_url      = c.last_effective_url
      updated_feed.etag          = etag_from_header(c.header_str)
      updated_feed.last_modified = last_modified_from_header(c.header_str)
      feed.update_from_feed(updated_feed)
      responses[feed.feed_url] = feed
      options[:on_success].call(feed) if options.has_key?(:on_success)
    end

    curl.on_failure do |c|
      add_feed_to_multi(multi, feed_queue.shift, feed_queue, responses, options) unless feed_queue.empty?
      response_code = c.response_code
      if response_code == 304 # it's not modified. this isn't an error condition
        responses[feed.feed_url] = feed
        options[:on_success].call(feed) if options.has_key?(:on_success)
      else
        # BUG FIX: was responses[feed.url] — the hash is keyed by feed_url
        # everywhere else, and feed objects expose feed_url, not url.
        responses[feed.feed_url] = c.response_code
        options[:on_failure].call(feed, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
      end
    end
  end
  multi.add(easy)
end
.add_url_to_multi(multi, url, url_queue, responses, options) ⇒ Object
97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 |
# File 'lib/feedzirra/feed.rb', line 97

# Adds one +url+ to the curl +multi+ handle. On completion the next url is
# shifted off +url_queue+, keeping a bounded number of requests in flight.
#
# responses - Hash keyed by the requested url; receives the parsed feed on
#             success or the numeric response code on failure.
# options   - may contain :user_agent, :if_modified_since, :if_none_match,
#             and :on_success / :on_failure callbacks.
#
# NOTE(review): the extraction of this page had stripped every occurrence of
# the +options+ identifier; it is restored here.
def self.add_url_to_multi(multi, url, url_queue, responses, options)
  easy = Curl::Easy.new(url) do |curl|
    curl.headers["User-Agent"]        = (options[:user_agent] || USER_AGENT)
    curl.headers["If-Modified-Since"] = options[:if_modified_since].httpdate if options.has_key?(:if_modified_since)
    curl.headers["If-None-Match"]     = options[:if_none_match] if options.has_key?(:if_none_match)
    curl.headers["Accept-encoding"]   = 'gzip, deflate'
    curl.follow_location = true

    curl.on_success do |c|
      # Keep the pipeline full: queue the next url before processing this one.
      add_url_to_multi(multi, url_queue.shift, url_queue, responses, options) unless url_queue.empty?
      xml = decode_content(c)
      klass = determine_feed_parser_for_xml(xml)
      if klass
        feed = klass.parse(xml)
        feed.feed_url      = c.last_effective_url
        feed.etag          = etag_from_header(c.header_str)
        feed.last_modified = last_modified_from_header(c.header_str)
        responses[url] = feed
        options[:on_success].call(url, feed) if options.has_key?(:on_success)
      else
        puts "Error determining parser for #{url} - #{c.last_effective_url}"
      end
    end

    curl.on_failure do |c|
      add_url_to_multi(multi, url_queue.shift, url_queue, responses, options) unless url_queue.empty?
      responses[url] = c.response_code
      options[:on_failure].call(url, c.response_code, c.header_str, c.body_str) if options.has_key?(:on_failure)
    end
  end
  multi.add(easy)
end
.decode_content(c) ⇒ Object
71 72 73 74 75 76 77 78 79 80 81 82 83 |
# File 'lib/feedzirra/feed.rb', line 71

# Decompresses a curl response body according to its Content-Encoding header.
# +c+ need only respond to #header_str and #body_str. Returns the decoded
# body string (the raw body when no supported encoding is present).
def self.decode_content(c)
  if c.header_str.match(/Content-Encoding: gzip/)
    gz  = Zlib::GzipReader.new(StringIO.new(c.body_str))
    xml = gz.read
    gz.close
  elsif c.header_str.match(/Content-Encoding: deflate/)
    # BUG FIX: was Zlib::Deflate.inflate, which does not exist — inflation
    # lives on Zlib::Inflate and would have raised NoMethodError.
    xml = Zlib::Inflate.inflate(c.body_str)
  else
    xml = c.body_str
  end
  xml
end
.determine_feed_parser_for_xml(xml) ⇒ Object
15 16 17 18 |
# File 'lib/feedzirra/feed.rb', line 15

# Returns the first registered feed class whose able_to_parse? accepts the
# document, or nil. Only the first 1000 characters are inspected, which is
# enough to identify the feed format cheaply.
def self.determine_feed_parser_for_xml(xml)
  prologue = xml.slice(0, 1000)
  feed_classes.find { |klass| klass.able_to_parse?(prologue) }
end
.etag_from_header(header) ⇒ Object
163 164 165 166 |
# File 'lib/feedzirra/feed.rb', line 163

# Extracts the ETag value from a raw HTTP header string.
# Returns nil when no ETag header is present.
def self.etag_from_header(header)
  m = header.match(/.*ETag:\s(.*)\r/)
  m && m[1]
end
.feed_classes ⇒ Object
24 25 26 |
# File 'lib/feedzirra/feed.rb', line 24

# Ordered list of parser classes, built lazily. Classes registered via
# add_feed_class are tried before these built-in defaults.
def self.feed_classes
  @feed_classes = [RSS, AtomFeedBurner, Atom] if @feed_classes.nil?
  @feed_classes
end
.fetch_and_parse(urls, options = {}) ⇒ Object
56 57 58 59 60 61 62 63 64 65 66 67 68 69 |
# File 'lib/feedzirra/feed.rb', line 56

# Fetches one url (String) or many (Array) and parses each response into a
# feed object. Returns the single feed for a String argument, or a hash of
# url => feed for an Array.
#
# NOTE(review): the extraction of this page had stripped the +options+
# identifier from the signature and body; it is restored here.
def self.fetch_and_parse(urls, options = {})
  url_queue = [*urls]
  multi = Curl::Multi.new
  # I broke these down so I would only try to do 30 simultaneously because
  # I was getting weird errors when doing a lot. As one finishes it pops
  # another off the queue.
  responses = {}
  url_queue.slice!(0, 30).each do |url|
    add_url_to_multi(multi, url, url_queue, responses, options)
  end
  multi.perform
  return urls.is_a?(String) ? responses.values.first : responses
end
.fetch_raw(urls, options = {}) ⇒ Object
Can take a single URL or an array of URLs. When passed a single URL it returns the body of the response; when passed an array of URLs it returns a hash with the URLs as keys and the bodies of the responses as values.
31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 |
# File 'lib/feedzirra/feed.rb', line 31

# Fetches one url (String) or many (Array) and returns the raw decoded
# response bodies without parsing. Returns the single body for a String
# argument, or a hash of url => body for an Array. On failure the value is
# the numeric response code instead of a body.
#
# options - may contain :user_agent, :if_modified_since, :if_none_match.
#
# NOTE(review): the extraction of this page had stripped the +options+
# identifier from the signature and body; it is restored here.
def self.fetch_raw(urls, options = {})
  url_queue = [*urls]
  multi = Curl::Multi.new
  responses = {}
  url_queue.each do |url|
    easy = Curl::Easy.new(url) do |curl|
      curl.headers["User-Agent"]        = (options[:user_agent] || USER_AGENT)
      curl.headers["If-Modified-Since"] = options[:if_modified_since].httpdate if options.has_key?(:if_modified_since)
      curl.headers["If-None-Match"]     = options[:if_none_match] if options.has_key?(:if_none_match)
      curl.headers["Accept-encoding"]   = 'gzip, deflate'
      curl.follow_location = true
      curl.on_success do |c|
        responses[url] = decode_content(c)
      end
      curl.on_failure do |c|
        responses[url] = c.response_code
      end
    end
    multi.add(easy)
  end
  multi.perform
  return urls.is_a?(String) ? responses.values.first : responses
end
.last_modified_from_header(header) ⇒ Object
168 169 170 171 |
# File 'lib/feedzirra/feed.rb', line 168

# Extracts and parses the Last-Modified value from a raw HTTP header string.
# Returns a Time, or nil when no Last-Modified header is present.
def self.last_modified_from_header(header)
  m = header.match(/.*Last-Modified:\s(.*)\r/)
  Time.parse(m[1]) if m
end
.parse(xml) ⇒ Object
7 8 9 10 11 12 13 |
# File 'lib/feedzirra/feed.rb', line 7

# Parses the given XML with the first feed class that recognizes it.
# Raises NoParserAvailable when no registered class can handle the content.
def self.parse(xml)
  parser = determine_feed_parser_for_xml(xml)
  raise NoParserAvailable.new("no valid parser for content.") unless parser
  parser.parse(xml)
end
.update(feeds, options = {}) ⇒ Object
85 86 87 88 89 90 91 92 93 94 95 |
# File 'lib/feedzirra/feed.rb', line 85

# Re-fetches one feed object or an array of feed objects conditionally
# (using each feed's saved etag / last_modified). Returns the single updated
# feed when only one response was collected, otherwise an array of results.
#
# Only 30 requests start immediately; the callbacks installed by
# add_feed_to_multi pull the rest off the queue as requests complete.
#
# NOTE(review): the extraction of this page had stripped the +options+
# identifier from the signature and body; it is restored here.
def self.update(feeds, options = {})
  feed_queue = [*feeds]
  multi = Curl::Multi.new
  responses = {}
  feed_queue.slice!(0, 30).each do |feed|
    add_feed_to_multi(multi, feed, feed_queue, responses, options)
  end
  multi.perform
  return responses.size == 1 ? responses.values.first : responses.values
end