Sharing my two Python solutions



    The first one uses a deque and a dictionary. It is quite simple, but slow.

    from collections import deque
    
    class LRUCache:
        # @param capacity, an integer
        def __init__(self, capacity):
            self.capacity = capacity
            # use dictionary to simulate LRU cache, deque to store cache usage order
            self.cache = dict()
            self.order = deque()
    
        # @return an integer
        def get(self, key):
            if key in self.cache:
                value = self.cache[key]
                # remove and append to tail
                self.order.remove(key)
                self.order.append(key)
                return value
            else:
                return -1
    
        # @param key, an integer
        # @param value, an integer
        # @return nothing
        def set(self, key, value):
            if key in self.cache:
                # remove key
                self.order.remove(key)
            self.cache[key] = value
            self.order.append(key)
            # remove an item (oldest) if cache is full
            if len(self.cache) > self.capacity:
                oldest = self.order.popleft()
                del self.cache[oldest]
    

    The second one uses OrderedDict. It is simpler and much faster.

    from collections import OrderedDict
    
    class LRUCache:
        # @param capacity, an integer
        def __init__(self, capacity):
            self.capacity = capacity
            # use OrderedDict to simulate LRU cache
            self.cache = OrderedDict()
    
        # @return an integer
        def get(self, key):
            if key in self.cache:
                value = self.cache[key]
                # remove key then add it again
                del self.cache[key]
                self.cache[key] = value
                return value
            else:
                return -1
    
        # @param key, an integer
        # @param value, an integer
        # @return nothing
        def set(self, key, value):
            if key in self.cache:
                # remove key if already in cache
                del self.cache[key]
            self.cache[key] = value
            # pop an item (oldest) if cache is full
            if len(self.cache) > self.capacity:
                self.cache.popitem(last=False)
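
    Both versions expose the same get/set interface. A quick sanity check (hypothetical keys and values, cache capacity of 2):

    # works with either implementation above
    cache = LRUCache(2)
    cache.set(1, 1)
    cache.set(2, 2)
    print(cache.get(1))   # 1, and key 1 is now the most recently used
    cache.set(3, 3)       # cache is full, so key 2 (least recently used) is evicted
    print(cache.get(2))   # -1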


    self.order.remove(key) is O(n)

    If the deque is very long, this call gets expensive.

    OrderedDict is a nice solution.
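
    If you are on Python 3 (3.2+), OrderedDict also has move_to_end, which avoids the delete-and-reinsert in get. A minimal sketch of that variant (keeping the same old-style get/set interface as above):

    from collections import OrderedDict

    class LRUCache:
        def __init__(self, capacity):
            self.capacity = capacity
            self.cache = OrderedDict()

        def get(self, key):
            if key not in self.cache:
                return -1
            # mark key as most recently used in O(1)
            self.cache.move_to_end(key)
            return self.cache[key]

        def set(self, key, value):
            if key in self.cache:
                self.cache.move_to_end(key)
            self.cache[key] = value
            # evict the least recently used entry if over capacity
            if len(self.cache) > self.capacity:
                self.cache.popitem(last=False)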

