parent c27c38f7c7
commit 27a42c47aa
@@ -251,7 +251,7 @@ options = morss.Options(csv=True) # arguments
 morss.crawler.sqlite_default = '/tmp/morss-cache.db' # sqlite cache location

 url = morss.UrlFix(url) # make sure the url is properly formatted
-rss = morss.FeedFetch(url, options) # this only grabs the RSS feed
+url, rss = morss.FeedFetch(url, options) # this only grabs the RSS feed
 rss = morss.FeedGather(rss, url, options) # this fills the feed and cleans it up

 output = morss.FeedFormat(rss, options, 'unicode') # formats final feed
@@ -339,7 +339,7 @@ def FeedFetch(url, options):
         log(req['contenttype'])
         raise MorssException('Link provided is not a valid feed')

-    return rss
+    return req['url'], rss


 def FeedGather(rss, url, options):
@@ -438,7 +438,7 @@ def process(url, cache=None, options=None):
     if cache:
         crawler.default_cache = crawler.SQLiteCache(cache)

-    rss = FeedFetch(url, options)
+    url, rss = FeedFetch(url, options)
     rss = FeedGather(rss, url, options)

     return FeedFormat(rss, options, 'unicode')
@@ -510,7 +510,7 @@ def cgi_app(environ, start_response):
     crawler.default_cache = crawler.SQLiteCache(os.path.join(os.getcwd(), 'morss-cache.db'))

     # get the work done
-    rss = FeedFetch(url, options)
+    url, rss = FeedFetch(url, options)

     if headers['content-type'] == 'text/xml':
         headers['content-type'] = rss.mimetype[0]
@@ -673,7 +673,7 @@ def cli_app():

     crawler.default_cache = crawler.SQLiteCache(os.path.expanduser('~/.cache/morss-cache.db'))

-    rss = FeedFetch(url, options)
+    url, rss = FeedFetch(url, options)
     rss = FeedGather(rss, url, options)
     out = FeedFormat(rss, options, 'unicode')

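Note: the sketch below shows the library-level call sequence implied by this commit, assuming the module layout visible in the hunks above. The feed URL and the variable names requested/fetched are illustrative, and the idea that the returned URL can differ from the input (FeedFetch now returns req['url'], e.g. after a redirect) is an assumption, not something the diff states.

# Minimal usage sketch of the new FeedFetch return value (illustrative URL).
import morss

options = morss.Options(csv=True)                      # output options, as in the README hunk
morss.crawler.sqlite_default = '/tmp/morss-cache.db'   # sqlite cache location

requested = morss.UrlFix('http://example.com/blog')    # hypothetical input URL, normalized
fetched, rss = morss.FeedFetch(requested, options)     # FeedFetch now also returns req['url']

# Pass the URL actually fetched (which may differ from the input) on to FeedGather,
# mirroring what process(), cgi_app() and cli_app() do after this change.
rss = morss.FeedGather(rss, fetched, options)
output = morss.FeedFormat(rss, options, 'unicode')
print(output)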