Compare commits

..

1 commit

Author SHA1 Message Date
pictuga 7f4589c578 crawler: return dict instead of tuple 2020-04-28 22:10:20 +02:00
3 changed files with 6 additions and 6 deletions

View File

@@ -251,7 +251,7 @@ options = morss.Options(csv=True) # arguments
 morss.crawler.sqlite_default = '/tmp/morss-cache.db' # sqlite cache location
 url = morss.UrlFix(url) # make sure the url is properly formatted
-rss = morss.FeedFetch(url, options) # this only grabs the RSS feed
+url, rss = morss.FeedFetch(url, options) # this only grabs the RSS feed
 rss = morss.FeedGather(rss, url, options) # this fills the feed and cleans it up
 output = morss.FeedFormat(rss, options, 'unicode') # formats final feed

View File

@@ -627,7 +627,7 @@ class MySQLCacheHandler(BaseCache):
 if __name__ == '__main__':
-data, con, contenttype, encoding = adv_get(sys.argv[1] if len(sys.argv) > 1 else 'https://morss.it')
+req = adv_get(sys.argv[1] if len(sys.argv) > 1 else 'https://morss.it')
 if not sys.flags.interactive:
 print(req['data'].decode(req['encoding']))

View File

@@ -339,7 +339,7 @@ def FeedFetch(url, options):
 log(req['contenttype'])
 raise MorssException('Link provided is not a valid feed')
-return rss
+return req['url'], rss
 def FeedGather(rss, url, options):
@@ -438,7 +438,7 @@ def process(url, cache=None, options=None):
 if cache:
 crawler.default_cache = crawler.SQLiteCache(cache)
-rss = FeedFetch(url, options)
+url, rss = FeedFetch(url, options)
 rss = FeedGather(rss, url, options)
 return FeedFormat(rss, options, 'unicode')
@@ -510,7 +510,7 @@ def cgi_app(environ, start_response):
 crawler.default_cache = crawler.SQLiteCache(os.path.join(os.getcwd(), 'morss-cache.db'))
 # get the work done
-rss = FeedFetch(url, options)
+url, rss = FeedFetch(url, options)
 if headers['content-type'] == 'text/xml':
 headers['content-type'] = rss.mimetype[0]
@@ -673,7 +673,7 @@ def cli_app():
 crawler.default_cache = crawler.SQLiteCache(os.path.expanduser('~/.cache/morss-cache.db'))
-rss = FeedFetch(url, options)
+url, rss = FeedFetch(url, options)
 rss = FeedGather(rss, url, options)
 out = FeedFormat(rss, options, 'unicode')