From ae9832c6b73629e9641f49dd7d2258affa767007 Mon Sep 17 00:00:00 2001 From: Donne Martin Date: Fri, 3 Mar 2017 18:53:54 -0800 Subject: [PATCH] Add LRU Cache solution --- .../lru_cache/__init__.py | 0 .../lru_cache/lru_cache.ipynb | 140 ++++++++++++++++++ .../lru_cache/lru_cache.py | 61 ++++++++ 3 files changed, 201 insertions(+) create mode 100644 solutions/object_oriented_design/lru_cache/__init__.py create mode 100644 solutions/object_oriented_design/lru_cache/lru_cache.ipynb create mode 100644 solutions/object_oriented_design/lru_cache/lru_cache.py diff --git a/solutions/object_oriented_design/lru_cache/__init__.py b/solutions/object_oriented_design/lru_cache/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/solutions/object_oriented_design/lru_cache/lru_cache.ipynb b/solutions/object_oriented_design/lru_cache/lru_cache.ipynb new file mode 100644 index 0000000..e78966d --- /dev/null +++ b/solutions/object_oriented_design/lru_cache/lru_cache.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook was prepared by [Donne Martin](https://github.com/donnemartin). Source and license info is on [GitHub](https://github.com/donnemartin/system-design-primer-primer)." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Design an LRU cache" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Constraints and assumptions\n", + "\n", + "* What are we caching?\n", + " * We are cahing the results of web queries\n", + "* Can we assume inputs are valid or do we have to validate them?\n", + " * Assume they're valid\n", + "* Can we assume this fits memory?\n", + " * Yes" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Solution" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Overwriting lru_cache.py\n" + ] + } + ], + "source": [ + "%%writefile lru_cache.py\n", + "class Node(object):\n", + "\n", + " def __init__(self, results):\n", + " self.results = results\n", + " self.next = next\n", + "\n", + "\n", + "class LinkedList(object):\n", + "\n", + " def __init__(self):\n", + " self.head = None\n", + " self.tail = None\n", + "\n", + " def move_to_front(self, node): # ...\n", + " def append_to_front(self, node): # ...\n", + " def remove_from_tail(self): # ...\n", + "\n", + "\n", + "class Cache(object):\n", + "\n", + " def __init__(self, MAX_SIZE):\n", + " self.MAX_SIZE = MAX_SIZE\n", + " self.size = 0\n", + " self.lookup = {} # key: query, value: node\n", + " self.linked_list = LinkedList()\n", + "\n", + " def get(self, query)\n", + " \"\"\"Get the stored query result from the cache.\n", + " \n", + " Accessing a node updates its position to the front of the LRU list.\n", + " \"\"\"\n", + " node = self.lookup[query]\n", + " if node is None:\n", + " return None\n", + " self.linked_list.move_to_front(node)\n", + " return node.results\n", + "\n", + " def set(self, results, query):\n", + " \"\"\"Set the result for the given query key in the cache.\n", + " \n", + " When updating an entry, updates its position to the front of the LRU 
list.\n", + " If the entry is new and the cache is at capacity, removes the oldest entry\n", + " before the new entry is added.\n", + " \"\"\"\n", + " node = self.lookup[query]\n", + " if node is not None:\n", + " # Key exists in cache, update the value\n", + " node.results = results\n", + " self.linked_list.move_to_front(node)\n", + " else:\n", + " # Key does not exist in cache\n", + " if self.size == self.MAX_SIZE:\n", + " # Remove the oldest entry from the linked list and lookup\n", + " self.lookup.pop(self.linked_list.tail.query, None)\n", + " self.linked_list.remove_from_tail()\n", + " else:\n", + " self.size += 1\n", + " # Add the new key and value\n", + " new_node = Node(results)\n", + " self.linked_list.append_to_front(new_node)\n", + " self.lookup[query] = new_node" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.4.3" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/solutions/object_oriented_design/lru_cache/lru_cache.py b/solutions/object_oriented_design/lru_cache/lru_cache.py new file mode 100644 index 0000000..3652aeb --- /dev/null +++ b/solutions/object_oriented_design/lru_cache/lru_cache.py @@ -0,0 +1,61 @@ +class Node(object): + + def __init__(self, results): + self.results = results + self.next = next + + +class LinkedList(object): + + def __init__(self): + self.head = None + self.tail = None + + def move_to_front(self, node): # ... + def append_to_front(self, node): # ... + def remove_from_tail(self): # ... 
+
+
+class Cache(object):
+
+    def __init__(self, MAX_SIZE):
+        self.MAX_SIZE = MAX_SIZE
+        self.size = 0
+        self.lookup = {}  # key: query, value: node
+        self.linked_list = LinkedList()
+
+    def get(self, query):
+        """Get the stored query result from the cache.
+
+        Accessing a node updates its position to the front of the LRU list.
+        """
+        node = self.lookup.get(query)
+        if node is None:
+            return None
+        self.linked_list.move_to_front(node)
+        return node.results
+
+    def set(self, results, query):
+        """Set the result for the given query key in the cache.
+
+        When updating an entry, updates its position to the front of the LRU list.
+        If the entry is new and the cache is at capacity, removes the oldest entry
+        before the new entry is added.
+        """
+        node = self.lookup.get(query)
+        if node is not None:
+            # Key exists in cache, update the value
+            node.results = results
+            self.linked_list.move_to_front(node)
+        else:
+            # Key does not exist in cache
+            if self.size == self.MAX_SIZE:
+                # Remove the oldest entry from the linked list and lookup
+                self.lookup.pop(self.linked_list.tail.query, None)
+                self.linked_list.remove_from_tail()
+            else:
+                self.size += 1
+            new_node = Node(results)
+            new_node.query = query  # so eviction can remove the matching lookup entry
+            self.linked_list.append_to_front(new_node)
+            self.lookup[query] = new_node
\ No newline at end of file