cache: avoid name collision

master
pictuga 2021-09-18 16:08:01 +02:00
parent 52c48b899f
commit a523518ae8
4 changed files with 6 additions and 6 deletions

View File

@@ -28,7 +28,7 @@ from io import BytesIO, StringIO
import chardet import chardet
from .cache import default_cache from .caching import default_cache
try: try:
# python 2 # python 2

View File

@@ -25,7 +25,7 @@ import lxml.etree
import lxml.html import lxml.html
from dateutil import tz from dateutil import tz
from . import crawler, feeds, readabilite from . import caching, crawler, feeds, readabilite
try: try:
# python 2 # python 2
@@ -411,7 +411,7 @@ def process(url, cache=None, options=None):
options = Options(options) options = Options(options)
if cache: if cache:
crawler.default_cache = crawler.SQLiteCache(cache) caching.default_cache = caching.SQLiteCache(cache)
url, rss = FeedFetch(url, options) url, rss = FeedFetch(url, options)
rss = FeedGather(rss, url, options) rss = FeedGather(rss, url, options)

View File

@@ -33,7 +33,7 @@ except ImportError:
# python 3 # python 3
from urllib.parse import unquote from urllib.parse import unquote
from . import crawler, readabilite from . import caching, crawler, readabilite
from .morss import (DELAY, TIMEOUT, FeedFetch, FeedFormat, FeedGather, from .morss import (DELAY, TIMEOUT, FeedFetch, FeedFormat, FeedGather,
MorssException, Options, log) MorssException, Options, log)
@@ -287,7 +287,7 @@ class WSGIRequestHandlerRequestUri(wsgiref.simple_server.WSGIRequestHandler):
def cgi_start_server(): def cgi_start_server():
crawler.default_cache.autotrim() caching.default_cache.autotrim()
print('Serving http://localhost:%s/' % PORT) print('Serving http://localhost:%s/' % PORT)
httpd = wsgiref.simple_server.make_server('', PORT, application, handler_class=WSGIRequestHandlerRequestUri) httpd = wsgiref.simple_server.make_server('', PORT, application, handler_class=WSGIRequestHandlerRequestUri)
@@ -295,4 +295,4 @@ def cgi_start_server():
if 'gunicorn' in os.getenv('SERVER_SOFTWARE', ''): if 'gunicorn' in os.getenv('SERVER_SOFTWARE', ''):
crawler.default_cache.autotrim() caching.default_cache.autotrim()