Add progress view.

master
pictuga 2013-05-01 17:57:09 +02:00
parent 5ee5dbf359
commit 949582ba19
2 changed files with 52 additions and 19 deletions

.htaccess

@@ -1,4 +1,5 @@
 AddHandler cgi-script .py
 Options +ExecCGI
+SetEnvIf Request_URI /progress/ no-gzip=1
 ErrorDocument 404 /morss.py
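
The added SetEnvIf directive turns off output compression (mod_deflate honours the no-gzip variable) for any request whose URI contains /progress/, so the per-item counters printed by the progress view are not held back in a compression buffer and reach the client as they are written. A minimal sketch of a client reading those counters as they arrive; the host name and URL layout are made-up examples, and incremental delivery still assumes nothing else buffers the response:

	# Sketch only: read the "current/total" lines of the progress view as they
	# arrive over HTTP. The host and feed URL below are placeholders.
	import urllib2

	resp = urllib2.urlopen('http://morss.example.org/progress/example.com/feed.xml')
	for line in iter(resp.readline, ''):
		line = line.strip()
		if line:
			done, total = line.split('/')
			print 'fetched item %s of %s' % (done, total)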

morss.py

@@ -62,6 +62,36 @@ def cleanXML(xml):
 	table = string.maketrans('', '')
 	return xml.translate(table, table[:32]).lstrip()
 
+def parseOptions(available):
+	options = None
+
+	if 'REQUEST_URI' in os.environ:
+		if 'REDIRECT_URL' in os.environ:
+			url = os.environ['REQUEST_URI'][1:]
+		else:
+			url = os.environ['REQUEST_URI'][len(os.environ['SCRIPT_NAME'])+1:]
+
+		if not url.startswith('http://') and not url.startswith('https://'):
+			split = url.split('/', 1)
+			if len(split) and split[0] in available:
+				options = split[0]
+				url = split[1]
+			url = "http://" + url
+	else:
+		if len(sys.argv) == 3:
+			if sys.argv[1] in available:
+				options = sys.argv[1]
+			url = sys.argv[2]
+		elif len(sys.argv) == 2:
+			url = sys.argv[1]
+		else:
+			return (None, None)
+
+		if not url.startswith('http://') and not url.startswith('https://'):
+			url = "http://" + url
+
+	return (url, options)
+
 class Cache:
 	"""Light, error-prone caching system."""
 	def __init__(self, folder, key):
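
The new parseOptions() helper centralises URL/option parsing for both the CGI and the command-line paths. A rough sketch of what it is expected to return on the command-line path, assuming morss.py and its dependencies are importable from the working directory; the feed URL is a placeholder:

	# Sketch: exercise parseOptions() by faking command-line arguments.
	# 'example.com/feed.xml' is a placeholder feed, not a real endpoint.
	import sys
	import morss  # assumes morss.py (and its imports) can be loaded as a module

	sys.argv = ['morss.py', 'progress', 'example.com/feed.xml']
	print morss.parseOptions(['progress'])
	# -> ('http://example.com/feed.xml', 'progress')

	sys.argv = ['morss.py', 'example.com/feed.xml']
	print morss.parseOptions(['progress'])
	# -> ('http://example.com/feed.xml', None)
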
@@ -335,7 +365,7 @@ def Fill(rss, cache):
 		item.content = out
 		cache.set(item.link, out)
 
-def Gather(url, cachePath):
+def Gather(url, cachePath, mode='feed'):
 	cache = Cache(cachePath, url)
 
 	# fetch feed
@@ -349,7 +379,6 @@ def Gather(url, cachePath):
 			xml = urllib2.urlopen(req).read()
 			cache.set(url, xml)
 		except (urllib2.HTTPError, urllib2.URLError):
-			print "Error, couldn't fetch RSS feed (the server might be banned from the given website)."
 			return False
 
 	xml = cleanXML(xml)
@@ -361,37 +390,40 @@ def Gather(url, cachePath):
 	if MAX:
 		for item in root.item[MAX:]:
 			item.getparent().remove(item)
 
-	for item in root.item:
+	for i,item in enumerate(root.item):
+		if mode == 'progress':
+			print "%s/%s" % (i+1, len(root.item))
+			sys.stdout.flush()
 		Fill(item, cache)
 
 	return root.tostring(xml_declaration=True, encoding='UTF-8')
 
 if __name__ == "__main__":
 	if 'REQUEST_URI' in os.environ:
+		url, options = parseOptions(['progress'])
 		print 'Status: 200'
 		print 'Content-Type: text/html\n'
 
-		if 'REDIRECT_URL' in os.environ:
-			url = os.environ['REQUEST_URI'][1:]
-		else:
-			url = os.environ['REQUEST_URI'][len(os.environ['SCRIPT_NAME'])+1:]
-		if not url.startswith('http://') and not url.startswith('https://'):
-			url = "http://" + url
-		url = url.replace(' ', '%20')
 		cache = os.getcwd() + '/cache'
 		log(url)
-		RSS = Gather(url, cache)
+		RSS = Gather(url, cache, options)
 	else:
-		if len(sys.argv) > 1 and sys.argv[1].startswith('http'):
-			url = sys.argv[1]
-			cache = os.path.expanduser('~') + '/.cache/morss'
-			RSS = Gather(url, cache)
-		else:
+		url, options = parseOptions(['progress'])
+		print url, options
+
+		if url is None:
 			print "Please provide url."
 			sys.exit(1)
 
-	if 'REQUEST_URI' in os.environ or not os.getenv('DEBUG', False) and RSS is not False:
-		print RSS
+		cache = os.path.expanduser('~') + '/.cache/morss'
+		RSS = Gather(url, cache, options)
+
+	if RSS is not False and options != 'progress':
+		if 'REQUEST_URI' in os.environ or not os.getenv('DEBUG', False):
+			print RSS
+
+	if RSS is False and options != 'progress':
+		print "Error fetching feed."
 
 	log('done')
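
Taken together, a run in progress mode prints one flushed "current/total" line per item and suppresses both the final feed and the error message, so another process can follow completion by reading stdout line by line. A rough sketch of such a driver; the python/morss.py invocation and the feed URL are illustrative assumptions:

	# Sketch: run morss in the new progress mode and consume the "i/n"
	# counters as they are flushed. Interpreter, path and feed URL are placeholders.
	import subprocess

	proc = subprocess.Popen(['python', 'morss.py', 'progress', 'example.com/feed.xml'],
		stdout=subprocess.PIPE)

	for line in iter(proc.stdout.readline, ''):
		parts = line.strip().split('/')
		if len(parts) == 2 and parts[0].isdigit() and parts[1].isdigit():
			print 'item %s of %s done' % (parts[0], parts[1])

	proc.wait()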