Use JSON to save cache

Faster, cleaner, safer, unobfuscated, and the cache file is no longer 1/3 bigger than the raw data
master
pictuga 2013-11-30 19:59:19 +01:00
parent fde5d3e8c5
commit 1a50cb390a
1 changed file with 7 additions and 16 deletions
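The old cache file stored one entry per line as the key, a tab, then the base64-encoded value; since base64 turns every 3 bytes into 4 ASCII characters, that format carries roughly 33% overhead, which is the "1/3 bigger" the message refers to. A minimal, illustrative comparison of the two on-disk formats (hypothetical data, not part of the commit; Python 3 spelling):

	from base64 import b64encode
	import json

	cache = {'http://example.com/feed': '<rss>...</rss>' * 1000}

	# old format: key + "\t" + base64(value), one entry per line (~4/3 the raw size)
	old = "\n".join(k + "\t" + b64encode(v.encode()).decode() for k, v in cache.items())

	# new format: the whole dict dumped as JSON (close to the raw size)
	new = json.dumps(cache)

	print(len(old), len(new))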

@@ -11,6 +11,7 @@ from fnmatch import fnmatch
 from base64 import b64encode, b64decode
 import re
 import string
+import json
 import lxml.html
 import lxml.html.clean
@@ -126,8 +127,7 @@ class Cache:
 	""" Light, error-prone caching system. """
 	def __init__(self, folder, key, persistent=False):
 		self._key = key
-		self._hash = str(hash(self._key))
+		self._hash = b64encode(self._key)
 		self._dir = folder
 		self._file = self._dir + '/' + self._hash
@@ -135,11 +135,8 @@
 		self._cache = {}	# new things to put in cache
 		if os.path.isfile(self._file):
-			data = open(self._file).readlines()
-			for line in data:
-				if "\t" in line:
-					key, bdata = line.split("\t", 1)
-					self._cached[key] = b64decode(bdata)
+			data = open(self._file).read()
+			self._cached = json.loads(data)
 		if persistent:
 			self._cache = self._cached
@@ -166,17 +163,11 @@
 		if len(self._cache) == 0:
 			return
-		out = []
-		for (key, data) in self._cache.iteritems():
-			bdata = b64encode(data)
-			out.append(str(key) + "\t" + bdata)
-		txt = "\n".join(out)
 		if not os.path.exists(self._dir):
 			os.makedirs(self._dir)
 		with open(self._file, 'w') as file:
-			file.write(txt)
+			file.write(json.dumps(self._cache))
 	def isYoungerThan(self, sec):
 		if not os.path.exists(self._file):
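
Read together, the hunks reduce the Cache persistence path to a single JSON document per cache file. Below is a standalone sketch of the resulting class under stated assumptions: the writing method is assumed to be named save(), fields not shown in the diff (such as self._cached) are reconstructed, values are assumed to be plain JSON-serialisable strings, and Python 3 spellings are used where the 2013 code relied on Python 2 behaviour.

	import os
	import json
	from base64 import b64encode

	class Cache:
		""" Light, error-prone caching system. """
		def __init__(self, folder, key, persistent=False):
			self._key = key
			# note: plain base64 can contain '/', which is awkward in a filename;
			# the diff uses b64encode(self._key) as-is
			self._hash = b64encode(self._key.encode()).decode()
			self._dir = folder
			self._file = self._dir + '/' + self._hash

			self._cached = {}	# what was already on disk (assumed field)
			self._cache = {}	# new things to put in cache

			if os.path.isfile(self._file):
				with open(self._file) as f:
					self._cached = json.load(f)

			if persistent:
				self._cache = self._cached

		def save(self):	# method name assumed; the diff only shows its body
			if len(self._cache) == 0:
				return

			if not os.path.exists(self._dir):
				os.makedirs(self._dir)

			with open(self._file, 'w') as f:
				json.dump(self._cache, f)

For example, Cache('cache', 'http://example.com/feed') would read and write cache/<base64 of the key> as JSON once entries are added to its _cache dict.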