patternpythonMinor
LRU cache in Python
Viewed 0 times
Tags: python, cache, lru
Problem
Here is my simple code for LRU cache in Python 2.7. Appreciate if anyone could review for logic correctness and also potential performance improvements.
One point I would like advice on: I am using a list to track access order — the first element of the list is the least recently used and the last element is the most recently used. I remove entries with the list's remove method (e.g. the statement accessQueue.remove(key)). What is the time complexity of list.remove — is it linear or log(N)?

from collections import defaultdict
kvStore = defaultdict(str)
accessQueue = []
maxSize = 10
def cacheSet(key, value):
kvStore[key] = value
if key in accessQueue:
accessQueue.remove(key)
accessQueue.append(key)
else:
accessQueue.append(key)
if len(accessQueue) > maxSize:
accessQueue.pop(0)
def cacheGet(key):
if key in kvStore:
accessQueue.remove(key)
accessQueue.append(key)
return kvStore[key]
else:
return None
def cachePrint():
for k,v in kvStore.items():
print k,v
if __name__ == "__main__":
for i in range(10):
cacheSet(i, "value " + str(i))
for i in range(10):
print cacheGet(i)
cacheSet(11, "value " + str(11))
cachePrint()

Solution
This should be a class - this is what OOP exists to do, abstract away details like this and contain them all in an object. You'll want to subclass
collections.MutableMapping. You can also use an ordered dict to combine the LRU behavior with the normal dict behavior. Below is my class implementation. I've left out a lot of the details, but most of the methods you can just delegate to the internal ordered dict self.store. You'll probably want to make sure you implement all of the dict methods, which are listed in the Python data-model documentation.

from collections import MutableMapping, OrderedDict
class LruCache(MutableMapping):
def __init__(self, max_size=10, *args, **kwargs):
self.max_size = max_size
self.store = OrderedDict()
self.update(dict(*args, **kwargs))
def __getitem__(self, key):
# Move the key to the end of the cache
try:
self.store[key] = self.store.pop(key)
return self.store[key]
except KeyError:
if not hasattr(self, '__missing__'):
raise
else:
return self.__missing__(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __setitem__(self, key, value):
try:
self.store.pop(key)
except KeyError:
# We just want to move it to the back, so ignore it
pass
self.store[key] = value
while len(self) > self.max_size:
self.store.popitem(last=False)
def __delitem__(self, key):
del self.store[key]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)Code Snippets
from collections import MutableMapping, OrderedDict
class LruCache(MutableMapping):
def __init__(self, max_size=10, *args, **kwargs):
self.max_size = max_size
self.store = OrderedDict()
self.update(dict(*args, **kwargs))
def __getitem__(self, key):
# Move the key to the end of the cache
try:
self.store[key] = self.store.pop(key)
return self.store[key]
except KeyError:
if not hasattr(self, '__missing__'):
raise
else:
return self.__missing__(key)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __setitem__(self, key, value):
try:
self.store.pop(key)
except KeyError:
# We just want to move it to the back, so ignore it
pass
self.store[key] = value
while len(self) > self.max_size:
self.store.popitem(last=False)
def __delitem__(self, key):
del self.store[key]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)Context
StackExchange Code Review Q#135661, answer score: 6
Revisions (0)
No revisions yet.