Added MRUCache

Sergey Chernov 2024-06-29 10:32:03 +07:00
parent cc8c9ecc5d
commit 1fd229fdb1
3 changed files with 110 additions and 21 deletions


@@ -5,7 +5,7 @@ plugins {
     `maven-publish`
 }
-val serialization_version = "1.6.4-SNAPSHOT"
+val serialization_version = "1.6.5-SNAPSHOT"
 group = "net.sergeych"
 version = "0.1.5-SNAPSHOT"
@ -27,26 +27,9 @@ kotlin {
useJUnitPlatform() useJUnitPlatform()
} }
} }
js(IR) { js {
browser { browser()
testTask { nodejs()
useKarma {
// /home/sergeych/snap/firefox/common/.mozilla/firefox/iff469o9.default
// /home/sergeych/snap/firefox/common/.mozilla/firefox/iff469o9.default
// useFirefox()
useChromeHeadless()
// useSafari()
}
}
// commonWebpackConfig {
// cssSupport.enabled = true
// }
}
nodejs {
testTask {
}
}
} }
// macosArm64() // macosArm64()
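
Side note (not part of the commit): in recent Kotlin versions the IR compiler is the only JS backend, so the shortened js { } block above is equivalent to the old js(IR) { }, and browser() / nodejs() enable both targets with their default test tasks. If headless-browser testing were wanted again, it could presumably be re-enabled inside the new block along these lines (a sketch of the multiplatform plugin DSL, not code from this repository):

kotlin {
    js {
        browser {
            testTask {
                // run browser tests in headless Chrome, as the removed configuration did
                useKarma {
                    useChromeHeadless()
                }
            }
        }
        nodejs()
    }
}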


@@ -0,0 +1,64 @@
package net.sergeych.bintools

/**
 * Most Recently Used keys cache.
 *
 * Maintains the specified size by removing the least recently used elements on insertion.
 * An element counts as used when it is inserted, updated or accessed (with [get]).
 * Least recently used (LRU) keys are automatically removed to maintain the [maxSize].
 *
 * Note that, as the cost of this bookkeeping, [MRUCache] is slower than a plain [MutableMap].
 */
class MRUCache<K, V>(
    val maxSize: Int,
    private val cache: LinkedHashMap<K, V> = LinkedHashMap()
) : MutableMap<K, V> by cache {

    private fun checkSize() {
        while (cache.size > maxSize) {
            // LinkedHashMap preserves insertion order, so the first key is the LRU one
            cache.remove(cache.keys.first())
        }
    }

    /**
     * Put the [value] associated with [key]; the key becomes MRU whether it already existed
     * in the cache or has just been added.
     *
     * If [size] == [maxSize], the LRU key will be dropped.
     *
     * @return the old value for the [key], or null if there was none
     */
    override fun put(key: K, value: V): V? {
        // we need the key to become MRU, so we remove it first to clear its position
        val oldValue = cache.remove(key)
        // now we always add, never update, so the key becomes the MRU element:
        cache.put(key, value).also { checkSize() }
        return oldValue
    }

    /**
     * Put all the key-value pairs; this is exactly the same as calling [put] in the same
     * order. Note that if the [from] map is not ordered and its size is greater than
     * [maxSize], some unpredictable keys will not be kept. To be exact, only the last
     * [maxSize] keys, in the order provided by the [from] map's entry iterator, will remain.
     *
     * If [from] is a [LinkedHashMap] or alike, only its last [maxSize] entries will remain.
     */
    override fun putAll(from: Map<out K, V>) {
        // maybe we should optimize it not to add unnecessary first keys
        for (e in from) {
            put(e.key, e.value)
            checkSize()
        }
    }

    /**
     * Get the value associated with the [key]. It makes the [key] the MRU one (the last to be evicted).
     */
    override fun get(key: K): V? {
        return cache[key]?.also {
            // re-insert the key so it moves to the end of the insertion order (the MRU position)
            cache.remove(key)
            cache[key] = it
        }
    }

    override fun toString(): String = cache.toString()
}
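
A minimal usage sketch of the class above (not part of the diff), illustrating the eviction and recency rules described in its KDoc; the key and value types here are arbitrary:

import net.sergeych.bintools.MRUCache

fun main() {
    val cache = MRUCache<String, Int>(2)
    cache["a"] = 1
    cache["b"] = 2
    println(cache["a"])   // 1; the access moves "a" to the MRU position
    cache["c"] = 3        // the size would exceed 2, so the LRU key "b" is evicted
    println("a" in cache) // true: recently accessed
    println("b" in cache) // false: dropped as least recently used
    println("c" in cache) // true: just inserted
}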


@@ -1,7 +1,11 @@
 package bintools

+import net.sergeych.bintools.MRUCache
 import net.sergeych.bintools.toDump
 import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertNull
+import kotlin.test.assertTrue

 class TestTools {
     @Test
@@ -16,4 +20,42 @@ class TestTools {
             println(res.toDump())
         }
     }
+
+    @Test
+    fun testCache() {
+        val cache = MRUCache<Int, String>(3)
+        cache.putAll(mapOf(1 to "one", 2 to "two", 3 to "three", 4 to "four"))
+        assertNull(cache[0])
+        // this access actually should reset the MRU position for 2:
+        assertEquals("two", cache[2])
+        assertNull(cache[1])
+        assertEquals(3, cache.size)
+        assertTrue { 3 in cache }
+        assertTrue { 4 in cache }
+        // now the MRU key is 2 (checked in assertEquals above), so the LRU key to drop is 3!
+        cache[5] = "five"
+        assertEquals(3, cache.size)
+        assertTrue { 2 in cache }
+        assertTrue { 4 in cache }
+        assertTrue { 5 in cache }
+        cache.getOrPut(3) { "new three" }
+        assertEquals(3, cache.size)
+        assertTrue { 2 in cache }
+        assertTrue { 3 in cache }
+        assertTrue { 5 in cache }
+        cache[2] = "New Two"
+        cache[6] = "six"
+        // 2 is now the second most recently used
+        // and 6 is MRU; the oldest was therefore 5, so it has been dropped
+        assertEquals(3, cache.size)
+        assertTrue { 2 in cache }
+        assertTrue { 6 in cache }
+        assertTrue { 3 in cache }
+    }
 }
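
One detail worth noting about the getOrPut call exercised in the test above: it is the standard library extension on MutableMap, implemented in terms of get and put, so on an MRUCache it dispatches to the overridden methods and therefore refreshes recency on a hit and may evict the LRU key on a miss. A small sketch of that behaviour (illustration only, not part of the commit):

import net.sergeych.bintools.MRUCache

fun main() {
    val cache = MRUCache<Int, String>(2)
    cache[1] = "one"
    cache[2] = "two"
    // miss: the default is computed and put() runs, evicting the LRU key 1
    cache.getOrPut(3) { "three" }
    println(1 in cache) // false
    // hit: get() runs and moves key 2 to the MRU position; the lambda is not called
    cache.getOrPut(2) { "unused" }
    cache[4] = "four"   // key 3 is now the LRU one and gets evicted
    println(2 in cache) // true
    println(3 in cache) // false
}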