diff --git a/README.md b/README.md
index 86b41c0..915f7b4 100644
--- a/README.md
+++ b/README.md
@@ -310,7 +310,7 @@ be cleared every time the program is run
 
 When parsing long feeds, with a lot of items (100+), morss might take a lot of
 time to parse it, or might even run into a memory overflow on some shared
 hosting plans (limits around 10Mb), in which case you might want to adjust the
-different values at the top of the script.
+settings below via environment variables.
 - `MAX_TIME` sets the maximum amount of time spent *fetching* articles, more time might be spent taking older articles from cache. `-1` for unlimited.
 - `MAX_ITEM` sets the maximum number of articles to fetch. `-1` for unlimited. More articles will be taken from cache following the nexts settings.
diff --git a/morss/morss.py b/morss/morss.py
index e6ffd74..a4f2502 100644
--- a/morss/morss.py
+++ b/morss/morss.py
@@ -24,14 +24,15 @@ except ImportError:
     from http.client import HTTPException
     from urllib.parse import urlparse, urljoin, parse_qs
 
-MAX_ITEM = 5 # cache-only beyond
-MAX_TIME = 2 # cache-only after (in sec)
-LIM_ITEM = 10 # deletes what's beyond
-LIM_TIME = 2.5 # deletes what's after
+MAX_ITEM = int(os.getenv('MAX_ITEM', 5)) # cache-only beyond
+MAX_TIME = int(os.getenv('MAX_TIME', 2)) # cache-only after (in sec)
 
-DELAY = 10 * 60 # xml cache & ETag cache (in sec)
-TIMEOUT = 4 # http timeout (in sec)
+LIM_ITEM = int(os.getenv('LIM_ITEM', 10)) # deletes what's beyond
+LIM_TIME = float(os.getenv('LIM_TIME', 2.5)) # deletes what's after (in sec)
+
+DELAY = int(os.getenv('DELAY', 10 * 60)) # xml cache & ETag cache (in sec)
+TIMEOUT = int(os.getenv('TIMEOUT', 4)) # http timeout (in sec)
 
 
 class MorssException(Exception):
     pass
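
Note on the casts above (a reviewer's sketch, not part of the patch): os.getenv returns a string whenever the variable is set in the environment, so the cast parses that string; only when the variable is unset is the cast applied to the Python default. The values below are made up for illustration:

    import os

    # Unset: the cast is applied to the Python default (already a number).
    os.environ.pop('MAX_ITEM', None)
    assert int(os.getenv('MAX_ITEM', 5)) == 5

    # Set: the value arrives as a string, which int() parses.
    os.environ['MAX_ITEM'] = '50'
    assert int(os.getenv('MAX_ITEM', 5)) == 50

    # Why LIM_TIME needs float() rather than int(): int('2.5') raises
    # ValueError, and int(2.5) would silently truncate the 2.5 default to 2.
    os.environ['LIM_TIME'] = '2.5'
    assert float(os.getenv('LIM_TIME', 2.5)) == 2.5

With the patch applied, the limits can then be overridden from the environment of whatever process launches morss; the 50 and 2.5 above are illustrative, not recommended values.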