Compare commits

..

No commits in common. "4f2895f9315492f25e9ff31b8502cbd66e3c6dfd" and "bfaf7b0facfdacc64cc18ed6c0eda1793d2002f0" have entirely different histories.

5 changed files with 12 additions and 19 deletions

View File

@ -474,7 +474,6 @@ debugging.
- `IGNORE_SSL=1`: to ignore SSL certs when fetching feeds and articles
- `DELAY` (seconds) sets the browser cache delay, only for HTTP clients
- `TIMEOUT` (seconds) sets the HTTP timeout when fetching rss feeds and articles
- `DATA_PATH`: to set custom file location for the `www` folder
When parsing long feeds, with a lot of items (100+), morss might take a lot of
time to parse it, or might even run into a memory overflow on some shared

View File

@ -44,7 +44,7 @@ def cli_app():
group.add_argument('--cache', action='store_true', help='only take articles from the cache (ie. don\'t grab new articles\' content), so as to save time')
group.add_argument('--force', action='store_true', help='force refetch the rss feed and articles')
group.add_argument('--proxy', action='store_true', help='doesn\'t fill the articles')
group.add_argument('--order', default='first', choices=('first', 'last', 'newest', 'oldest'), help='order in which to process items (which are however NOT sorted in the output)')
group.add_argument('--newest', action='store_true', help='return the feed items in chronological order (morss otherwise shows the items by appearing order)')
group.add_argument('--firstlink', action='store_true', help='pull the first article mentioned in the description instead of the default link')
group.add_argument('--resolve', action='store_true', help='replace tracking links with direct links to articles (not compatible with --proxy)')

View File

@ -65,8 +65,7 @@ def parse_rules(filename=None):
# for each rule
if rules[section][arg].startswith('file:'):
path = data_path('www', rules[section][arg][5:])
file_raw = open(path).read()
file_raw = open(data_path(rules[section][arg][5:])).read()
file_clean = re.sub('<[/?]?(xsl|xml)[^>]+?>', '', file_raw)
rules[section][arg] = file_clean

View File

@ -15,37 +15,32 @@
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import os.path
import sys
def pkg_path(*path_elements):
    """Join *path_elements* onto the directory containing this module.

    Returns an absolute-ish path rooted at the package directory
    (wherever this file lives on disk).
    """
    package_dir = os.path.dirname(__file__)
    return os.path.join(package_dir, *path_elements)
def pkg_path(path=''):
    """Return *path* resolved relative to this package's directory.

    With no argument, returns the package directory itself (with a
    trailing separator, as produced by os.path.join with '').
    """
    base_dir = os.path.dirname(__file__)
    return os.path.join(base_dir, path)
# Module-level override/cache for the data directory: when set (not None),
# data_path() joins requested paths directly onto it instead of searching
# the candidate base directories.
data_path_base = None
def data_path(*path_elements):
def data_path(path=''):
global data_path_base
path = os.path.join(*path_elements)
if data_path_base is not None:
return os.path.join(data_path_base, path)
bases = [
os.path.join(sys.prefix, 'share/morss'), # when installed as root
pkg_path('../../../share/morss'),
pkg_path('../../../../share/morss'),
pkg_path('../share/morss'), # for `pip install --target=dir morss`
pkg_path('..'), # when running from source tree
os.path.join(sys.prefix, 'share/morss/www'), # when installed as root
pkg_path('../../../share/morss/www'),
pkg_path('../../../../share/morss/www'),
pkg_path('../share/morss/www'), # for `pip install --target=dir morss`
pkg_path('../www'), # when running from source tree
pkg_path('../..'), # when running on `.cgi` subdir on Apache
]
if 'DATA_PATH' in os.environ:
bases.append(os.environ['DATA_PATH'])
for base in bases:
full_path = os.path.join(base, path)

View File

@ -169,7 +169,7 @@ def cgi_file_handler(environ, start_response, app):
if re.match(r'^/?([a-zA-Z0-9_-][a-zA-Z0-9\._-]+/?)*$', url):
# if it is a legitimate url (no funny relative paths)
try:
path = data_path('www', url)
path = data_path(url)
f = open(path, 'rb')
except IOError: