Use seconds for every parameter

master
pictuga 2013-08-24 23:40:37 +02:00
parent b350602232
commit 0c6e28205a
1 changed file with 6 additions and 5 deletions

@@ -26,9 +26,9 @@ from readability import readability
 LIM_ITEM = 100 # deletes what's beyond
 MAX_ITEM = 50 # cache-only beyond
-MAX_TIME = 7 # cache-only after
-DELAY = 10 # xml cache
-TIMEOUT = 2 # http timeout
+MAX_TIME = 7 # cache-only after (in sec)
+DELAY = 10*60 # xml cache (in sec)
+TIMEOUT = 2 # http timeout (in sec)
 OPTIONS = ['progress', 'cache']
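
The intent of the change is visible in the new values: every duration constant is now expressed in seconds, so DELAY = 10*60 still means ten minutes but callers no longer need to multiply by 60. The real Cache class is not part of this diff; the sketch below is only an illustration, under that assumption, of how a seconds-based isYoungerThan() can compare against these constants directly (the _timestamp attribute is hypothetical).

import time

class Cache(object):
	"""Illustrative stand-in only; the real Cache class lives outside this diff."""
	def __init__(self):
		self._timestamp = time.time()  # hypothetical: when the entry was last written

	def isYoungerThan(self, delta):
		# delta is now always given in seconds, e.g. DELAY = 10*60
		return time.time() - self._timestamp < delta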
@@ -326,7 +326,7 @@ def Fill(item, cache, feedurl='/', fast=False):
 	content = cache.get(item.link)
 	match = re.search(r'^error-([a-z]{2,10})$', content)
 	if match:
-		if cache.isYoungerThan(DELAY*60):
+		if cache.isYoungerThan(DELAY):
 			log('cached error: %s' % match.groups()[0])
 			return True
 		else:
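
For reference, the error marker matched here is a plain string stored in the cache. The snippet below just exercises the same regular expression in isolation; the 'error-timeout' value is a made-up example, not necessarily what the code writes.

import re

content = 'error-timeout'  # hypothetical cached marker value
match = re.search(r'^error-([a-z]{2,10})$', content)
if match:
	print('cached error: %s' % match.groups()[0])  # prints: cached error: timeout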
@@ -364,10 +364,11 @@ def Fill(item, cache, feedurl='/', fast=False):
 	return True
 
 def Gather(url, cachePath, mode='feed'):
+	url = url.replace(' ', '%20')
 	cache = Cache(cachePath, url)
 
 	# fetch feed
-	if cache.isYoungerThan(DELAY*60) and url in cache:
+	if cache.isYoungerThan(DELAY) and url in cache:
 		log('xml cached')
 		xml = cache.get(url)
 	else:
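
The added url.replace(' ', '%20') only handles literal spaces. A broader percent-encoding would go through urllib's quote(); the snippet below shows that as an alternative, not as what the commit does, and the Python 3 import path is an assumption about the runtime (a 2013 codebase more likely ran on Python 2, where the equivalent is urllib.quote).

from urllib.parse import quote  # Python 3; on Python 2 this would be urllib.quote

url = 'http://example.com/my feed.xml'  # hypothetical feed URL typed with a space
print(url.replace(' ', '%20'))          # what the commit does -> http://example.com/my%20feed.xml
print(quote(url, safe=':/'))            # broader escaping     -> http://example.com/my%20feed.xml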