Compare commits

f6851391374d1902c41fc10f9ef4428dcc7b9ffb..271ac8f80f5761da43f5345bec6a923b25677cf1

No commits in common. "f6851391374d1902c41fc10f9ef4428dcc7b9ffb" and "271ac8f80f5761da43f5345bec6a923b25677cf1" have entirely different histories.

2 changed files with 18 additions and 7 deletions

View File

@@ -585,8 +585,14 @@ class SQLiteCache(BaseCache):
value[3] = sqlite3.Binary(value[3]) # data value[3] = sqlite3.Binary(value[3]) # data
value = tuple(value) value = tuple(value)
with self.con: if url in self:
self.con.execute('INSERT INTO data VALUES (?,?,?,?,?,?) ON CONFLICT(url) DO UPDATE SET code=?, msg=?, headers=?, data=?, timestamp=?', (url,) + value + value) with self.con:
self.con.execute('UPDATE data SET code=?, msg=?, headers=?, data=?, timestamp=? WHERE url=?',
value + (url,))
else:
with self.con:
self.con.execute('INSERT INTO data VALUES (?,?,?,?,?,?)', (url,) + value)
import pymysql.cursors import pymysql.cursors
@@ -616,9 +622,14 @@ class MySQLCacheHandler(BaseCache):
return row[1:] return row[1:]
def __setitem__(self, url, value): # (code, msg, headers, data, timestamp) def __setitem__(self, url, value): # (code, msg, headers, data, timestamp)
with self.cursor() as cursor: if url in self:
cursor.execute('INSERT INTO data VALUES (%s,%s,%s,%s,%s,%s) ON DUPLICATE KEY UPDATE code=%s, msg=%s, headers=%s, data=%s, timestamp=%s', with self.cursor() as cursor:
(url,) + value + value) cursor.execute('UPDATE data SET code=%s, msg=%s, headers=%s, data=%s, timestamp=%s WHERE url=%s',
value + (url,))
else:
with self.cursor() as cursor:
cursor.execute('INSERT INTO data VALUES (%s,%s,%s,%s,%s,%s)', (url,) + value)
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -279,7 +279,7 @@ def ItemBefore(item, options):
def ItemAfter(item, options): def ItemAfter(item, options):
if options.clip and item.desc and item.content: if options.clip and item.desc and item.content:
item.content = item.desc + "<br/><br/><hr/><br/><br/>" + item.content item.content = item.desc + "<br/><br/><center>* * *</center><br/><br/>" + item.content
del item.desc del item.desc
if options.nolink and item.content: if options.nolink and item.content:
@@ -303,7 +303,7 @@ def FeedFetch(url, options):
delay = 0 delay = 0
try: try:
req = crawler.adv_get(url=url, follow=('rss' if not options.items else None), delay=delay, timeout=TIMEOUT * 2) req = crawler.adv_get(url=url, follow='rss', delay=delay, timeout=TIMEOUT * 2)
except (IOError, HTTPException): except (IOError, HTTPException):
raise MorssException('Error downloading feed') raise MorssException('Error downloading feed')