This commit is contained in:
Jack Tang 2020-04-12 20:42:30 +08:00
commit a9af69f2dd
7 changed files with 1350 additions and 0 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
tests/test

32
README.md Normal file
View File

@ -0,0 +1,32 @@
# LRU cache
The standard implementation of an LRU cache (hash table + doubly-linked list).
All operations are in time complexity of O(1).
This implementation is *not* thread-safe.
## Usage
```
# create a new LRU cache with an initial capacity of 1 item
let cache = newLRUCache[int, string](1)
cache[1] = "a"
cache[2] = "b"
# key 1 is not in cache, because key 1 is eldest and capacity is only 1
assert: 1 notin cache
assert: 2 in cache
# increase capacity and add key 1
cache.capacity = 2
cache[1] = "a"
assert: 1 in cache
assert: 2 in cache
# update recentness of key 2 and add key 3, then key 1 will be discarded.
echo cache[2]
cache[3] = "c"
assert: 1 notin cache
assert: 2 in cache
assert: 3 in cache
```

1031
docs/index.html Normal file

File diff suppressed because it is too large Load Diff

17
lrucache.nimble Normal file
View File

@ -0,0 +1,17 @@
# Package
version = "1.0.0"
author = "Jack Tang"
description = "Least recently used (LRU) cache"
license = "MIT"
srcDir = "src"

# Dependencies
requires "nim >= 1.0.0"

# Build the HTML API documentation into docs/index.html.
task docgen, "generate docs":
  exec "nim doc --out:docs/index.html src/lrucache.nim"

125
src/lrucache.nim Normal file
View File

@ -0,0 +1,125 @@
import lists, tables
type
  # Internal payload kept inside each doubly-linked-list node.
  # no need to use ref, since DoublyLinkedNode is already a ref
  Node[K,T] = object
    key: K  ## stored so the table entry can be deleted when this node is evicted
    val: T  ## the cached value
  LRUCache*[K, T] = ref object
    ## LRU cache: hash table for O(1) key lookup plus a doubly-linked list
    ## ordered from most recently used (head) to least recently used (tail).
    capacity: int  ## max number of entries before eviction kicks in
    list: DoublyLinkedList[Node[K,T]]  ## head = most recent, tail = next eviction candidate
    table: Table[K, DoublyLinkedNode[Node[K,T]]]  ## key -> its list node
proc newLRUCache*[K,T](capacity: int): LRUCache[K,T] =
  ## Create a new least-recently-used (LRU) cache that keeps at most the
  ## `capacity` most recently accessed items.
  result = LRUCache[K,T](capacity: capacity)
  result.list = initDoublyLinkedList[Node[K,T]]()
  # pre-size the table to avoid rehashing while the cache fills up
  result.table = initTable[K, DoublyLinkedNode[Node[K,T]]](rightSize(capacity))
proc resize[K,T](cache: LRUCache[K,T]) =
  # Evict from the list tail (least recently used) until the cache fits
  # within its configured capacity again.
  while cache.len > cache.capacity:
    let oldest = cache.list.tail
    cache.table.del(oldest.value.key)
    cache.list.remove(oldest)
proc addNewNode[K,T](cache: LRUCache[K,T], key: K, val: T) =
  # Insert a brand-new entry at the head (most recently used position),
  # then evict if the capacity is now exceeded.
  let fresh = newDoublyLinkedNode[Node[K,T]](Node[K,T](key: key, val: val))
  cache.list.prepend(fresh)
  cache.table[key] = fresh
  cache.resize()
proc capacity*[K,T](cache: LRUCache[K,T]): int =
  ## Current maximum number of entries the cache may hold.
  result = cache.capacity
proc `capacity=`*[K,T](cache: LRUCache[K,T], capacity: int) =
  ## Change the capacity. If the cache currently holds more than
  ## `capacity` items, the least recently used ones are evicted now.
  cache.capacity = capacity
  cache.resize()
proc len*[K,T](cache: LRUCache[K,T]): int =
  ## Number of keys currently stored in the cache.
  result = cache.table.len
proc contains*[K,T](cache: LRUCache[K,T], key: K): bool =
  ## Check whether `key` is in the cache (enables `in`/`notin`).
  ## Does *NOT* update recentness.
  key in cache.table
proc peek*[K,T](cache: LRUCache[K,T], key: K): T =
  ## Read the value for `key` *without* updating recentness.
  ## Raises `KeyError` if `key` is not in `cache`.
  cache.table[key].value.val
proc del*[K,T](cache: LRUCache[K,T], key: K) =
  ## Remove `key` from the cache. Does nothing when the key is absent.
  let victim = cache.table.getOrDefault(key, nil)
  if victim.isNil:
    return
  cache.list.remove(victim)
  cache.table.del(key)
proc clear*[K,T](cache: LRUCache[K,T]) =
  ## Drop every entry, leaving the cache empty (capacity unchanged).
  cache.table.clear()
  cache.list = initDoublyLinkedList[Node[K,T]]()
proc `[]`*[K,T](cache: LRUCache[K,T], key: K): T =
  ## Read the value for `key` and mark the entry as most recently used.
  ## Raises `KeyError` if `key` is not in `cache`.
  let hit = cache.table[key]  # raises KeyError on a miss
  # promote to head of the recency list
  cache.list.remove(hit)
  cache.list.prepend(hit)
  result = hit.value.val
proc `[]=`*[K,T](cache: LRUCache[K,T], key: K, val: T) =
  ## Store `val` under `key`, marking the entry as most recently used.
  ## Evicts the least recently used entry when the capacity is exceeded.
  let existing = cache.table.getOrDefault(key, nil)
  if not existing.isNil:
    # overwrite in place and promote to head of the recency list
    existing.value.val = val
    cache.list.remove(existing)
    cache.list.prepend(existing)
  else:
    cache.addNewNode(key, val)
proc get*[K,T](cache: LRUCache[K,T], key: K): T =
  ## Alias of `cache[key]` (updates recentness; raises `KeyError` on miss).
  result = cache[key]
proc put*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
  ## Alias of `cache[key] = val` that also returns the value just stored.
  ##
  ## Bug fix: the original body was only `cache[key] = val`, which returns
  ## nothing, so `result` stayed `default(T)` — callers always received a
  ## zero-initialized value instead of `val`.
  cache[key] = val
  result = val
proc getOrDefault*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
  ## Similar to get, but return `val` if `key` is not in `cache`.
  ## NOTE(review): unlike `get`, this does not touch the recency list.
  let hit = cache.table.getOrDefault(key, nil)
  result = if hit.isNil: val else: hit.value.val
proc getOrPut*[K,T](cache: LRUCache[K,T], key: K, val: T): T =
  ## Similar to `get`, but put and return `val` if `key` is not in `cache`.
  let node = cache.table.getOrDefault(key, nil)
  if node.isNil:
    result = val
    cache.addNewNode(key, val)
  else:
    # Consistency fix: the doc promises "similar to `get`", and `get`/`[]`
    # promote a hit to most-recently-used, but the original left the node
    # where it was. Promote it so a getOrPut hit counts as a use.
    cache.list.remove(node)
    cache.list.prepend(node)
    result = node.value.val
proc isEmpty*[K,T](cache: LRUCache[K,T]): bool =
  ## True when the cache holds no entries (`cache.len == 0`).
  result = cache.len == 0
proc isFull*[K,T](cache: LRUCache[K,T]): bool =
  ## True when the cache is at capacity (`cache.len == cache.capacity`).
  result = cache.len == cache.capacity

1
tests/config.nims Normal file
View File

@ -0,0 +1 @@
switch("path", "$projectDir/../src")

143
tests/test.nim Normal file
View File

@ -0,0 +1,143 @@
import unittest
import lrucache
suite "LRUCache":
  test "put, get, del":
    let cache = newLRUCache[int, int](100)
    # put
    for i in 1..10: cache[i] = i
    check: cache.len == 10
    # get
    for i in 1..10: check: cache[i] == i
    # del
    for i in 1..10: cache.del(i)
    check: cache.len == 0

  test "remove items if capacity exceeded":
    let cache = newLRUCache[int, int](5)
    # put
    for i in 1..10: cache[i] = i
    check: cache.len == 5
    # the 5 oldest keys were evicted, the 5 newest remain
    for i in 1..5:
      check: i notin cache
    for i in 6..10:
      check: i in cache

  # fix: test name typo "remvoe" -> "remove"
  test "remove least recently used item if capacity exceeded":
    let cache = newLRUCache[int, int](2)
    cache[1] = 1
    cache[2] = 2
    cache[3] = 3
    check: 1 notin cache
    check: 2 in cache
    check: 3 in cache
    # access 2 so key 1 becomes the eviction candidate
    discard cache[2]
    cache[1] = 1
    check: 1 in cache
    check: 2 in cache
    check: 3 notin cache

  test "peek should not update recentness":
    let cache = newLRUCache[int, int](2)
    cache[1] = 1
    cache[2] = 2
    # peek
    check: cache.peek(1) == 1
    cache[3] = 3
    check: 1 notin cache
    check: 2 in cache
    check: 3 in cache

  test "[]= should update recentness":
    let cache = newLRUCache[int, int](2)
    cache[1] = 1
    cache[2] = 2
    # fix: comment said "peek"; this reads through `[]`, which updates recentness
    check: cache[1] == 1
    cache[3] = 3
    check: 1 in cache
    check: 2 notin cache
    check: 3 in cache

  test "getOrDefault()":
    let cache = newLRUCache[int, int](2)
    check: cache.getOrDefault(1, 1) == 1
    check: 1 notin cache
    cache[1] = 2
    check: cache.getOrDefault(1, 1) == 2

  test "getOrPut()":
    let cache = newLRUCache[int, int](2)
    check: cache.getOrPut(1, 1) == 1
    check: 1 in cache

  test "isEmpty":
    let cache = newLRUCache[int, int](2)
    check: cache.isEmpty
    cache[1] = 1
    check: not cache.isEmpty

  test "isFull":
    let cache = newLRUCache[int, int](1)
    check: not cache.isFull
    cache[1] = 1
    check: cache.isFull

  test "clear":
    let cache = newLRUCache[int, int](10)
    check: cache.isEmpty
    cache[1] = 1
    check: not cache.isEmpty
    cache.clear()
    check: cache.isEmpty

  test "re-capacity dynamically":
    let cache = newLRUCache[int, int](1)
    cache[1] = 1
    cache[2] = 2
    check: 1 notin cache
    check: 2 in cache
    cache.capacity = 2
    cache[1] = 1
    check: 1 in cache
    check: 2 in cache

  test "README usage":
    # create a new LRU cache with initial capacity of 1 item
    let cache = newLRUCache[int, string](1)
    cache[1] = "a"
    cache[2] = "b"
    # key 1 is not in cache, because key 1 is eldest and capacity is only 1
    assert: 1 notin cache
    assert: 2 in cache
    # increase capacity and add key 1
    cache.capacity = 2
    cache[1] = "a"
    assert: 1 in cache
    assert: 2 in cache
    # update recentness of key 2 and add key 3, then key 1 will be discarded.
    assert: cache[2] == "b"
    cache[3] = "c"
    assert: 1 notin cache
    assert: 2 in cache
    assert: 3 in cache