# File 'lib/miteru/crawler.rb', line 10
def call(website)
  Try[OpenSSL::SSL::SSLError, ::HTTP::Error, Addressable::URI::InvalidURIError] do
    info = "Website:#{website.info}."
    info = info.colorize(:red) if website.kits?
    logger.info(info)

    website.kits.each do |kit|
      downloader = Downloader.new(kit)
      result = downloader.result
      unless result.success?
        logger.warn("Kit:#{kit.truncated_url} failed to download - #{result.failure}.")
        next
      end

      destination = result.value!
      logger.info("Kit:#{kit.truncated_url} downloaded as #{destination}.")
      FileUtils.rm(destination, force: true) unless auto_download?

      notify(kit)
    end

    cache.set(website.url, website.source, ex: cache_ex) if cache?
  end.recover { nil }.value!
end
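
The method wraps the whole crawl in dry-monads' Try, so an SSL, HTTP, or URI error on any single website is caught rather than aborting the run; the trailing recover { nil }.value! turns a failure into a plain nil return. Below is a minimal, self-contained sketch of that pattern, assuming the dry-monads gem with Try#recover available (as the method above relies on); the class SourceFetcher, the method fetch, and the exception list are hypothetical stand-ins, not part of Miteru.

# Sketch of the Try/recover error-handling pattern used in #call.
# Assumes dry-monads; names below are illustrative only.
require "dry/monads"
require "net/http"
require "openssl"

class SourceFetcher
  include Dry::Monads[:try]

  # Returns the response body, or nil when one of the listed errors is raised,
  # mirroring the .recover { nil }.value! chain in the method above.
  def fetch(url)
    Try[OpenSSL::SSL::SSLError, SocketError, Errno::ECONNREFUSED] do
      Net::HTTP.get(URI(url))
    end.recover { nil }.value!
  end
end

puts SourceFetcher.new.fetch("https://example.com")

Run against an unreachable host, fetch returns nil instead of raising, which is how #call keeps a crawl going when an individual website fails.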