@reagentx / LRU Cache

Python

Files
  • main.py
main.py
class LRUCache:
    '''
    Keeps the most recently used items at the front of a list and
    evicts the least recently used item from the end.
    Implemented with a plain list instead of collections.deque.
    '''

    def __init__(self, max_items):
        self.cache = []
        self.max_items = max_items

    def add_item(self, item):
        '''
        If the item is already in the cache, move it to the front.
        Otherwise, if there is room, add it to the front;
        if there is no room, evict the item at the end, then add to the front.
        '''
        if item in self.cache:
            # Membership test and remove() are both O(n) on a list :(
            print(f'Item {item} in cache!')
            self.cache.remove(item)
        elif len(self.cache) >= self.max_items:
            # Cache is full: drop the least recently used item (the last one)
            self.cache.pop()
        # Either way, the item goes to the front as the most recently used
        self.cache.insert(0, item)
        print(f'Cached {item}')


    def print_cache(self):
        for i, v in enumerate(self.cache):
            print(f'Position {i}: {v}')


# Demo: the cache holds 5 items; the second half of the loop re-adds
# existing values (i // 2) to exercise the move-to-front path.
c = LRUCache(5)
for i in range(10):
    if i <= 5:
        c.add_item(i)
    else:
        c.add_item(i // 2)
# Most recent first, this prints 4, 3, 5, 2, 1
c.print_cache()
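
A note on complexity: the list-based version above pays O(n) for the
membership test, remove(), and the front insertion. As a rough sketch of
how the same behaviour could get O(1) lookups, here is a hypothetical
variant backed by collections.OrderedDict (the LRUCacheOD name is made up
for illustration and is not part of the original file):

from collections import OrderedDict

class LRUCacheOD:
    '''
    Same behaviour as LRUCache, but an OrderedDict gives O(1) lookup,
    move-to-front, and eviction. Most recent items live at the *end*
    of the OrderedDict, so print_cache() iterates in reverse.
    '''

    def __init__(self, max_items):
        self.cache = OrderedDict()
        self.max_items = max_items

    def add_item(self, item):
        if item in self.cache:
            # O(1) hash lookup; mark the key as most recently used
            print(f'Item {item} in cache!')
            self.cache.move_to_end(item)
        elif len(self.cache) >= self.max_items:
            # Evict the least recently used key (the oldest entry)
            self.cache.popitem(last=False)
        self.cache[item] = None
        print(f'Cached {item}')

    def print_cache(self):
        # Reverse iteration so position 0 is the most recently used item
        for i, v in enumerate(reversed(self.cache)):
            print(f'Position {i}: {v}')

Swapping LRUCacheOD into the demo above should produce the same final ordering.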