commit a9af69f2dd1778073014a75f9478202240399708
Author: Jack Tang
Date:   Sun Apr 12 20:42:30 2020 +0800

    init

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..887b09d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+tests/test
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1362f26
--- /dev/null
+++ b/README.md
@@ -0,0 +1,32 @@
+# LRU cache
+
+The standard implementation of an LRU cache (hash table + doubly-linked list).
+All operations run in O(1) time.
+This implementation is *not* thread-safe.
+
+## Usage
+
+```
+# create a new LRU cache with an initial capacity of 1 item
+let cache = newLRUCache[int, string](1)
+
+cache[1] = "a"
+cache[2] = "b"
+
+# key 1 is not in the cache, because key 1 is the eldest and the capacity is only 1
+assert: 1 notin cache
+assert: 2 in cache
+
+# increase the capacity and add key 1
+cache.capacity = 2
+cache[1] = "a"
+assert: 1 in cache
+assert: 2 in cache
+
+# update the recentness of key 2 and add key 3; key 1 will be discarded
+echo cache[2]
+cache[3] = "c"
+assert: 1 notin cache
+assert: 2 in cache
+assert: 3 in cache
+```
diff --git a/docs/index.html b/docs/index.html
new file mode 100644
index 0000000..658d674
--- /dev/null
+++ b/docs/index.html
@@ -0,0 +1,1031 @@
[generated nim doc HTML for the lrucache module; markup omitted, recoverable documentation text below]
+lrucache
+
+Types
+
+  LRUCache[K; T] = ref object
+    capacity: int
+    list: DoublyLinkedList[Node[K, T]]
+    table: Table[K, DoublyLinkedNode[Node[K, T]]]
+
+Procs
+
+  proc newLRUCache[K, T](capacity: int): LRUCache[K, T]
+    Create a new Least-Recently-Used (LRU) cache that stores at most `capacity` items.
+
+  proc capacity[K, T](cache: LRUCache[K, T]): int
+
+  proc `capacity=`[K, T](cache: LRUCache[K, T]; capacity: int)
+
+  proc len[K, T](cache: LRUCache[K, T]): int
+    Return the number of keys in the cache.
+
+  proc contains[K, T](cache: LRUCache[K, T]; key: K): bool
+    Check whether `key` is in the cache. Does NOT update recentness.
+
+  proc peek[K, T](cache: LRUCache[K, T]; key: K): T
+    Read the value for `key` without updating recentness. Raises KeyError if `key` is not in the cache.
+
+  proc del[K, T](cache: LRUCache[K, T]; key: K)
+    Delete `key` from the cache. Does nothing if `key` is not in the cache.
+
+  proc clear[K, T](cache: LRUCache[K, T])
+
+  proc `[]`[K, T](cache: LRUCache[K, T]; key: K): T
+    Read the value for `key` and update recentness. Raises KeyError if `key` is not in the cache.
+
+  proc `[]=`[K, T](cache: LRUCache[K, T]; key: K; val: T)
+    Put `val` in the cache under `key`. Removes the least recently used value if the length exceeds the capacity.
+
+  proc get[K, T](cache: LRUCache[K, T]; key: K): T
+    Alias of `cache[key]`.
+
+  proc put[K, T](cache: LRUCache[K, T]; key: K; val: T): T
+    Alias of `cache[key] = val`.
+
+  proc getOrDefault[K, T](cache: LRUCache[K, T]; key: K; val: T): T
+    Similar to `get`, but returns `val` if `key` is not in the cache.
+
+  proc getOrPut[K, T](cache: LRUCache[K, T]; key: K; val: T): T
+    Similar to `get`, but puts and returns `val` if `key` is not in the cache.
+
+  proc isEmpty[K, T](cache: LRUCache[K, T]): bool
+    Equivalent to `cache.len == 0`.
+
+  proc isFull[K, T](cache: LRUCache[K, T]): bool
+    Equivalent to `cache.len == cache.capacity`.
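For orientation, a small illustrative sketch of the lookup variants listed above (getOrPut, getOrDefault, peek, isFull). It is not part of the commit; it uses only the procs declared in src/lrucache.nim and the same `import lrucache` setup the tests use:

```
import lrucache

let cache = newLRUCache[string, int](2)

# getOrPut inserts only when the key is missing
discard cache.getOrPut("a", 1)        # stores ("a", 1) and returns 1
assert cache.getOrPut("a", 99) == 1   # key already present, stored value wins

# getOrDefault never modifies the cache
assert cache.getOrDefault("b", -1) == -1
assert "b" notin cache

# peek reads without touching recentness; isFull compares len to capacity
cache["b"] = 2
assert cache.peek("a") == 1
assert cache.isFull
```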
diff --git a/lrucache.nimble b/lrucache.nimble
new file mode 100644
index 0000000..923bc97
--- /dev/null
+++ b/lrucache.nimble
@@ -0,0 +1,17 @@
+# Package
+
+version = "1.0.0"
+author = "Jack Tang"
+description = "Least recently used (LRU) cache"
+license = "MIT"
+srcDir = "src"
+
+
+
+# Dependencies
+
+requires "nim >= 1.0.0"
+
+
+task docgen, "generate docs":
+  exec "nim doc --out:docs/index.html src/lrucache.nim"
\ No newline at end of file
diff --git a/src/lrucache.nim b/src/lrucache.nim
new file mode 100644
index 0000000..7cabd10
--- /dev/null
+++ b/src/lrucache.nim
@@ -0,0 +1,125 @@
+import lists, tables
+
+type
+  # no need to use ref, since DoublyLinkedNode is already a ref
+  Node[K,T] = object
+    key: K
+    val: T
+
+  LRUCache*[K, T] = ref object
+    capacity: int
+    list: DoublyLinkedList[Node[K,T]]
+    table: Table[K, DoublyLinkedNode[Node[K,T]]]
+
+proc newLRUCache*[K,T](capacity: int): LRUCache[K,T] =
+  ## Create a new Least-Recently-Used (LRU) cache that stores at most `capacity` items.
+  LRUCache[K,T](
+    capacity: capacity,
+    list: initDoublyLinkedList[Node[K,T]](),
+    table: initTable[K, DoublyLinkedNode[Node[K,T]]]( rightSize(capacity) )
+  )
+
+proc resize[K,T](cache: LRUCache[K,T]) =
+  while cache.len > cache.capacity:
+    let t = cache.list.tail
+    cache.table.del(t.value.key)
+    cache.list.remove t
+
+proc addNewNode[K,T](cache: LRUCache[K,T], key: K, val: T) =
+  # create new node
+  let node = newDoublyLinkedNode[Node[K,T]](
+    Node[K,T](key: key, val: val)
+  )
+  # put on table and prepend new node
+  cache.table[key] = node
+  cache.list.prepend node
+  # evict old nodes if the capacity is exceeded
+  cache.resize()
+
+proc capacity*[K,T](cache: LRUCache[K,T]): int = cache.capacity
+
+proc `capacity=`*[K,T](cache: LRUCache[K,T], capacity: int) =
+  cache.capacity = capacity
+  cache.resize()
+
+proc len*[K,T](cache: LRUCache[K,T]): int =
+  ## Return the number of keys in the cache
+  cache.table.len
+
+proc contains*[K,T](cache: LRUCache[K,T], key: K): bool =
+  ## Check whether `key` is in the cache. Does *not* update recentness.
+  cache.table.contains(key)
+
+proc peek*[K,T](cache: LRUCache[K,T], key: K): T =
+  ## Read the value for `key` without updating recentness.
+  ## Raise `KeyError` if `key` is not in `cache`.
+  let node = cache.table[key]
+  result = node.value.val
+
+proc del*[K,T](cache: LRUCache[K,T], key: K) =
+  ## Delete `key` from the cache. Does nothing if `key` is not in the cache.
+  let node = cache.table.getOrDefault(key, nil)
+  if not node.isNil:
+    cache.table.del(key)
+    cache.list.remove(node)
+
+proc clear*[K,T](cache: LRUCache[K,T]) =
+  cache.list = initDoublyLinkedList[Node[K,T]]()
+  cache.table.clear()
+
+proc `[]`*[K,T](cache: LRUCache[K,T], key: K): T =
+  ## Read the value for `key` from `cache` and update recentness.
+  ## Raise `KeyError` if `key` is not in `cache`.
+  let node = cache.table[key]  # may raise KeyError
+  result = node.value.val
+  cache.list.remove node
+  cache.list.prepend node
+
+proc `[]=`*[K,T](cache: LRUCache[K,T], key: K, val: T) =
+  ## Put `val` in the cache under `key`.
+  ## Remove the least recently used value from the cache if the length exceeds the capacity.
+
+  # read current node
+  var node = cache.table.getOrDefault(key, nil)
+  if node.isNil:
+    cache.addNewNode(key, val)
+  else:
+    # set value
+    node.value.val = val
+    # move to head
+    cache.list.remove node
+    cache.list.prepend node
+
+proc get*[K,T](cache: LRUCache[K,T], key: K): T =
+  ## Alias of `cache[key]`
+  cache[key]
+
+proc put*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
+  ## Alias of `cache[key] = val`
+  cache[key] = val
+
+proc getOrDefault*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
+  ## Similar to `get`, but return `val` if `key` is not in the cache
+  let node = cache.table.getOrDefault(key, nil)
+  if node.isNil:
+    result = val
+  else:
+    result = node.value.val
+
+proc getOrPut*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
+  ## Similar to `get`, but put and return `val` if `key` is not in the cache
+  let node = cache.table.getOrDefault(key, nil)
+  if not node.isNil:
+    result = node.value.val
+  else:
+    result = val
+    cache.addNewNode(key, val)
+
+proc isEmpty*[K,T](cache: LRUCache[K,T]): bool =
+  ## Equivalent to `cache.len == 0`
+  cache.len == 0
+
+proc isFull*[K,T](cache: LRUCache[K,T]): bool =
+  ## Equivalent to `cache.len == cache.capacity`
+  cache.len == cache.capacity
+
diff --git a/tests/config.nims b/tests/config.nims
new file mode 100644
index 0000000..3bb69f8
--- /dev/null
+++ b/tests/config.nims
@@ -0,0 +1 @@
+switch("path", "$projectDir/../src")
\ No newline at end of file
diff --git a/tests/test.nim b/tests/test.nim
new file mode 100644
index 0000000..c704a2d
--- /dev/null
+++ b/tests/test.nim
@@ -0,0 +1,143 @@
+import unittest
+import lrucache
+
+suite "LRUCache":
+
+  test "put, get, del":
+    let cache = newLRUCache[int, int](100)
+
+    # put
+    for i in 1..10: cache[i] = i
+    check: cache.len == 10
+
+    # get
+    for i in 1..10: check: cache[i] == i
+
+    # del
+    for i in 1..10: cache.del(i)
+    check: cache.len == 0
+
+  test "remove items if capacity exceeded":
+    let cache = newLRUCache[int, int](5)
+
+    # put
+    for i in 1..10: cache[i] = i
+    check: cache.len == 5
+
+    # check
+    for i in 1..5:
+      check: i notin cache
+    for i in 6..10:
+      check: i in cache
+
+  test "remove least recently used item if capacity exceeded":
+    let cache = newLRUCache[int, int](2)
+    cache[1] = 1
+    cache[2] = 2
+    cache[3] = 3
+    check: 1 notin cache
+    check: 2 in cache
+    check: 3 in cache
+
+    # access 2
+    discard cache[2]
+    cache[1] = 1
+
+    check: 1 in cache
+    check: 2 in cache
+    check: 3 notin cache
+
+  test "peek should not update recentness":
+    let cache = newLRUCache[int, int](2)
+    cache[1] = 1
+    cache[2] = 2
+
+    # peek
+    check: cache.peek(1) == 1
+    cache[3] = 3
+
+    check: 1 notin cache
+    check: 2 in cache
+    check: 3 in cache
+
+  test "[] should update recentness":
+    let cache = newLRUCache[int, int](2)
+    cache[1] = 1
+    cache[2] = 2
+
+    # read key 1, which updates its recentness
+    check: cache[1] == 1
+    cache[3] = 3
+
+    check: 1 in cache
+    check: 2 notin cache
+    check: 3 in cache
+
+  test "getOrDefault()":
+    let cache = newLRUCache[int, int](2)
+    check: cache.getOrDefault(1,1) == 1
+    check: 1 notin cache
+    cache[1] = 2
+    check: cache.getOrDefault(1,1) == 2
+
+  test "getOrPut()":
+    let cache = newLRUCache[int, int](2)
+    check: cache.getOrPut(1,1) == 1
+    check: 1 in cache
+
+  test "isEmpty":
+    let cache = newLRUCache[int, int](2)
+    check: cache.isEmpty
+    cache[1] = 1
+    check: not cache.isEmpty
+
+  test "isFull":
+    let cache = newLRUCache[int, int](1)
+    check: not cache.isFull
+    cache[1] = 1
+    check: cache.isFull
+
+  test "clear":
+    let cache = newLRUCache[int, int](10)
+    check: cache.isEmpty
+    cache[1] = 1
+    check: not cache.isEmpty
+    cache.clear()
+    check: cache.isEmpty
+
+  test "change capacity dynamically":
+    let cache = newLRUCache[int, int](1)
+    cache[1] = 1
+    cache[2] = 2
+    check: 1 notin cache
+    check: 2 in cache
+
+    cache.capacity = 2
+    cache[1] = 1
+
+    check: 1 in cache
+    check: 2 in cache
+
+  test "README usage":
+    # create a new LRU cache with an initial capacity of 1 item
+    let cache = newLRUCache[int, string](1)
+
+    cache[1] = "a"
+    cache[2] = "b"
+
+    # key 1 is not in the cache, because key 1 is the eldest and the capacity is only 1
+    assert: 1 notin cache
+    assert: 2 in cache
+
+    # increase the capacity and add key 1
+    cache.capacity = 2
+    cache[1] = "a"
+    assert: 1 in cache
+    assert: 2 in cache
+
+    # update the recentness of key 2 and add key 3; key 1 will be discarded
+    assert: cache[2] == "b"
+    cache[3] = "c"
+    assert: 1 notin cache
+    assert: 2 in cache
+    assert: 3 in cache
\ No newline at end of file
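The suite above grows the capacity but never shrinks it. Since the `capacity=` setter calls `resize`, lowering the capacity should evict the least recently used entries immediately. A hypothetical extra check (not part of this commit), written in the same unittest style:

```
import unittest
import lrucache

suite "LRUCache (sketch)":
  test "shrinking capacity evicts the oldest entries":
    let cache = newLRUCache[int, int](3)
    for i in 1..3: cache[i] = i

    # `capacity=` calls resize(), so the two eldest keys are dropped at once
    cache.capacity = 1
    check: cache.len == 1
    check: 1 notin cache
    check: 2 notin cache
    check: 3 in cache
```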