Use cleaner http error catch

One of the caught error types (urllib2.HTTPError) inherits from another (urllib2.URLError), so catching both was redundant.
master
pictuga 2013-09-15 18:52:34 +02:00
parent 2eb6e69b5a
commit 532852a408
1 changed files with 2 additions and 2 deletions

View File

@@ -341,7 +341,7 @@ def Fill(item, cache, feedurl='/', fast=False):
         url = item.link.encode('utf-8')
         con = urllib2.build_opener(HTMLDownloader()).open(url, timeout=TIMEOUT)
         data = con.read()
-    except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException, socket.timeout) as error:
+    except (urllib2.URLError, httplib.HTTPException, socket.timeout) as error:
         log('http error')
         cache.set(item.link, 'error-http')
         return True
@@ -379,7 +379,7 @@ def Gather(url, cachePath, progress=False):
         cache.set(url, xml)
         cache.set('etag', con.headers.getheader('etag'))
         cache.set('lastmodified', con.headers.getheader('last-modified'))
-    except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException):
+    except (urllib2.URLError, httplib.HTTPException, socket.timeout):
         return False
     rss = feeds.parse(xml)