Syncache WIP: add a non-blocking synchronous cache (modules/memo) backed by async computation
This commit is contained in:
parent
632ea83670
commit
3ba1a5b55f
|
@ -7,7 +7,7 @@ import lila.common.PimpedConfig._
|
|||
final class Env(
|
||||
config: Config,
|
||||
scheduler: lila.common.Scheduler,
|
||||
notifyApi: lila.notify.NotifyApi) {
|
||||
notifyApi: lila.notify.NotifyApi)(implicit system: akka.actor.ActorSystem) {
|
||||
|
||||
private val PrismicApiUrl = config getString "prismic.api_url"
|
||||
private val PrismicCollection = config getString "prismic.collection"
|
||||
|
@ -38,5 +38,6 @@ object Env {
|
|||
lazy val current: Env = "blog" boot new Env(
|
||||
config = lila.common.PlayApp loadConfig "blog",
|
||||
scheduler = lila.common.PlayApp.scheduler,
|
||||
notifyApi = lila.notify.Env.current.api)
|
||||
notifyApi = lila.notify.Env.current.api)(
|
||||
lila.common.PlayApp.system)
|
||||
}
|
||||
|
|
|
@ -2,21 +2,29 @@ package lila.blog
|
|||
|
||||
import scala.concurrent.duration._
|
||||
|
||||
final class LastPostCache(api: BlogApi, ttl: Duration, collection: String) {
|
||||
final class LastPostCache(
|
||||
api: BlogApi,
|
||||
ttl: FiniteDuration,
|
||||
collection: String)(implicit system: akka.actor.ActorSystem) {
|
||||
|
||||
private val cache = lila.memo.MixedCache.single[List[MiniPost]](
|
||||
private val cache = new lila.memo.Syncache[Boolean, List[MiniPost]](
|
||||
name = "blog.lastPost",
|
||||
f = api.prismicApi flatMap { prismic =>
|
||||
compute = _ => fetch,
|
||||
default = _ => Nil,
|
||||
timeToLive = ttl,
|
||||
awaitTime = 1.millisecond,
|
||||
logger = logger)
|
||||
|
||||
private def fetch = {
|
||||
println("----------- fetching from prismic!")
|
||||
api.prismicApi flatMap { prismic =>
|
||||
api.recent(prismic, none, 3) map {
|
||||
_ ?? {
|
||||
_.results.toList flatMap MiniPost.fromDocument(collection)
|
||||
}
|
||||
}
|
||||
},
|
||||
timeToLive = ttl,
|
||||
default = Nil,
|
||||
awaitTime = 1.millisecond,
|
||||
logger = logger)
|
||||
}
|
||||
}
|
||||
|
||||
def apply = cache get true
|
||||
|
||||
|
|
|
@ -15,8 +15,7 @@ object Builder {
|
|||
* backed by a Cache from Google Collections.
|
||||
*/
|
||||
def cache[K, V](ttl: Duration, f: K => V): LoadingCache[K, V] =
|
||||
cacheBuilder[K, V](ttl)
|
||||
.build[K, V](f)
|
||||
cacheBuilder[K, V](ttl).build[K, V](f)
|
||||
|
||||
def expiry[K, V](ttl: Duration): Cache[K, V] =
|
||||
cacheBuilder[K, V](ttl).build[K, V]
|
||||
|
|
53
modules/memo/src/main/Syncache.scala
Normal file
53
modules/memo/src/main/Syncache.scala
Normal file
|
@ -0,0 +1,53 @@
|
|||
package lila.memo
|
||||
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import scala.concurrent.duration._
|
||||
|
||||
import com.google.common.cache.{ LoadingCache => SyncCache }
|
||||
|
||||
/** A synchronous cache facade over an asynchronous computation.
  *
  * `get` never blocks: on a hit it returns the cached value; on a miss it
  * kicks off the async `compute` (at most once per key at a time) and
  * immediately returns `default(k)`. Once the computation succeeds, its
  * result becomes visible to subsequent `get` calls until `timeToLive`
  * expires or the key is invalidated.
  *
  * @param name          cache name, used for logging
  * @param compute       async computation producing the value for a key
  * @param default       fallback value returned while a key is not yet cached
  * @param timeToLive    expiry of successfully computed values
  * @param awaitTime     NOTE(review): currently unused (WIP) — kept for interface stability
  * @param resultTimeout how long a single computation may run before being failed
  * @param logger        sink for computation failures
  */
final class Syncache[K, V](
name: String,
compute: K => Fu[V],
default: K => V,
timeToLive: FiniteDuration,
awaitTime: FiniteDuration,
resultTimeout: FiniteDuration = 5 seconds,
logger: lila.log.Logger)(implicit system: akka.actor.ActorSystem) {

  // in-flight computations, keyed so each key is computed at most once concurrently
  private val chm = new ConcurrentHashMap[K, Fu[V]]

  // expiring store of the last successfully computed value per key
  private val sync = Builder.expiry[K, V](timeToLive)

  // Invoked by ConcurrentHashMap#computeIfAbsent on a miss: runs the async
  // computation under `resultTimeout`; on success the value is published to
  // `sync`, on failure it is logged — either way the in-flight entry is
  // removed so a later `get` can retry.
  private val loadFunction = new java.util.function.Function[K, Fu[V]] {
    def apply(k: K) =
      compute(k).withTimeout(
        duration = resultTimeout,
        // error message names this class (was: "MixedCache2", a leftover from the class it replaced)
        error = lila.common.LilaException(s"Syncache $name $k timed out after $resultTimeout")
      ).addEffects(
        err => {
          logger.branch(name).warn(s"$err key=$k")
          chm remove k
        },
        res => {
          sync.put(k, res)
          chm remove k
        }
      )
  }

  /** Synchronously get the value for `k`.
    * Returns the cached value on a hit; on a miss, schedules the async
    * computation (if not already in flight) and returns `default(k)`. */
  def get(k: K): V = Option(sync getIfPresent k) match {
    case Some(v) => v
    case None =>
      chm.computeIfAbsent(k, loadFunction)
      default(k)
  }

  /** Evict the cached value for `k`, forcing a recompute on the next miss. */
  def invalidate(k: K): Unit = sync invalidate k
}
|
Loading…
Reference in a new issue