remove Analysis.uid & Analysis.by, add Analysis.fk

keep the fishnet key in case we need to invalidate analysis
after discovering a bad client
pull/8637/head
Thibault Duplessis 2021-04-12 09:05:18 +02:00
parent 7e891a03ae
commit fb8c114a79
8 changed files with 65 additions and 68 deletions
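
Note on intent: the new Analysis.fk field only records which fishnet client produced each server analysis; nothing in this commit reads it back yet. A minimal sketch of the cleanup it is meant to enable, written against plain ReactiveMongo rather than lila's Coll/$doc helpers, with a hypothetical AnalysisInvalidator name:

import scala.concurrent.{ ExecutionContext, Future }
import reactivemongo.api.bson._
import reactivemongo.api.bson.collection.BSONCollection

// Hypothetical helper, not part of this commit: drop every analysis whose "fk"
// matches the key of a fishnet client we no longer trust.
final class AnalysisInvalidator(analysisColl: BSONCollection)(implicit ec: ExecutionContext) {

  def invalidateByClientKey(badKey: String): Future[Int] =
    analysisColl.delete
      .one(BSONDocument("fk" -> badKey))
      .map(_.n) // number of analysis documents removed
}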

@@ -168,10 +168,7 @@ object replay {
       game.analysable option
         span(
           cls := "computer-analysis",
-          dataPanel := "computer-analysis",
-          title := analysis.map { a =>
-            s"Provided by ${usernameOrId(a.providedBy)}"
-          }
+          dataPanel := "computer-analysis"
         )(trans.computerAnalysis()),
       !game.isPgnImport option frag(
         game.turns > 1 option span(dataPanel := "move-times")(trans.moveTimes()),

@@ -7,8 +7,7 @@ import lila.hub.actorApi.map.TellIfExists
 final class Analyser(
     gameRepo: GameRepo,
-    analysisRepo: AnalysisRepo,
-    requesterApi: RequesterApi
+    analysisRepo: AnalysisRepo
 )(implicit ec: scala.concurrent.ExecutionContext) {
 
   def get(game: Game): Fu[Option[Analysis]] =
@@ -26,14 +25,12 @@ final class Analyser(
           sendAnalysisProgress(analysis, complete = true) >>- {
             Bus.publish(actorApi.AnalysisReady(game, analysis), "analysisReady")
             Bus.publish(InsertGame(game), "gameSearchInsert")
-            requesterApi.save(analysis, game.userIds).unit
           }
         }
       }
     case Some(_) =>
       analysisRepo.save(analysis) >>
-        sendAnalysisProgress(analysis, complete = true) >>-
-          requesterApi.save(analysis, Nil).unit
+        sendAnalysisProgress(analysis, complete = true)
   }
 
   def progress(analysis: Analysis): Funit = sendAnalysisProgress(analysis, complete = false)

@@ -10,7 +10,8 @@ case class Analysis(
     studyId: Option[String],
     infos: List[Info],
     startPly: Int,
-    date: DateTime
+    date: DateTime,
+    fk: Option[Analysis.FishnetKey]
 ) {
 
   lazy val infoAdvices: InfoAdvices = {
@@ -45,7 +46,8 @@ object Analysis {
   case class Analyzed(game: lila.game.Game, analysis: Analysis)
 
   type ID = String
+  type FishnetKey = String
 
   implicit val analysisBSONHandler = new BSON[Analysis] {
 
     def reads(r: BSON.Reader) = {
@@ -56,16 +58,18 @@ object Analysis {
         studyId = r strO "studyId",
         infos = Info.decodeList(raw, startPly) err s"Invalid analysis data $raw",
         startPly = startPly,
-        date = r date "date"
+        date = r date "date",
+        fk = r strO "fk"
       )
     }
 
-    def writes(w: BSON.Writer, o: Analysis) =
+    def writes(w: BSON.Writer, a: Analysis) =
       BSONDocument(
-        "_id" -> o.id,
-        "studyId" -> o.studyId,
-        "data" -> Info.encodeList(o.infos),
-        "ply" -> w.intO(o.startPly),
-        "date" -> w.date(o.date)
+        "_id" -> a.id,
+        "studyId" -> a.studyId,
+        "data" -> Info.encodeList(a.infos),
+        "ply" -> w.intO(a.startPly),
+        "date" -> w.date(a.date),
+        "fk" -> a.fk
       )
   }
}
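
Both directions treat fk as optional: reads uses r strO "fk", which yields None when the field is absent, so analysis documents written before this commit still deserialize; and writing "fk" -> a.fk relies on ReactiveMongo omitting None values, so analyses from lichess's own client carry no "fk" field at all. A small self-contained check of that behaviour (the key value is made up):

import reactivemongo.api.bson._

object FkRoundTripCheck extends App {
  // what the new `writes` emits for a third-party fishnet client...
  val withKey = BSONDocument("_id" -> "abcd1234", "fk" -> Option("some-client-key"))
  // ...and for lichess's own client, or for any analysis saved before this commit
  val withoutKey = BSONDocument("_id" -> "abcd1234", "fk" -> Option.empty[String])

  assert(withKey.getAsOpt[String]("fk") contains "some-client-key")
  assert(withoutKey.get("fk").isEmpty) // the None field is simply not written
}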

@@ -8,28 +8,21 @@ import lila.db.dsl._
 import lila.memo.CacheApi
 import lila.user.User
 
-final class RequesterApi(coll: Coll, cacheApi: CacheApi)(implicit ec: scala.concurrent.ExecutionContext) {
+final class RequesterApi(coll: Coll)(implicit ec: scala.concurrent.ExecutionContext) {
 
   private val formatter = format.DateTimeFormat.forPattern("yyyy-MM-dd")
 
-  private[analyse] val requesterCache =
-    cacheApi.notLoadingSync[Analysis.ID, lila.user.User.ID](256, "analyse.requester") {
-      _.expireAfterWrite(10 minutes).build()
-    }
-
-  def save(analysis: Analysis, playerIds: List[User.ID]): Funit =
-    requesterCache.getIfPresent(analysis.id) ?? { requester =>
-      val cost = if (playerIds has requester) 1 else 2
-      coll.update
-        .one(
-          $id(requester),
-          $inc("total" -> 1) ++
-            $inc(formatter.print(DateTime.now) -> cost) ++
-            $set("last" -> analysis.id),
-          upsert = true
-        )
-        .void
-    }
+  def add(requester: User.ID, ownGame: Boolean): Funit =
+    coll.update
+      .one(
+        $id(requester),
+        $inc(
+          "total" -> 1,
+          formatter.print(DateTime.now) -> (if (ownGame) 1 else 2)
+        ),
+        upsert = true
+      )
+      .void
 
def countTodayAndThisWeek(userId: User.ID): Fu[(Int, Int)] = {
val now = DateTime.now
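
The accounting model changes here: before, the requester was remembered in a 10-minute cache and only charged in save once fishnet delivered the analysis (which also set a "last" field); now add charges the account up front, when the request is accepted (see FishnetLimiter further down), presumably so the charge no longer depends on a cache entry that may have expired by the time the analysis arrives. The document keeps a lifetime total plus one counter per calendar day; an own game costs 1, someone else's game costs 2. A rough, self-contained sketch of the bookkeeping (values made up):

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat

object RequesterCostSketch extends App {
  // the same day key the upsert above uses as a field name, e.g. "2021-04-12"
  val dayKey = DateTimeFormat.forPattern("yyyy-MM-dd").print(DateTime.now)

  // the amount added to today's counter, mirroring the $inc above
  def cost(ownGame: Boolean): Int = if (ownGame) 1 else 2

  // resulting requester document, roughly:
  //   { _id: "<user id>", total: <lifetime count>, "<yyyy-MM-dd>": <points spent that day> }
  println(s"""$$inc: { "total": 1, "$dayKey": ${cost(ownGame = false)} }""")
}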

@@ -12,7 +12,7 @@ final class Analyser(
     gameRepo: lila.game.GameRepo,
     uciMemo: UciMemo,
     evalCache: FishnetEvalCache,
-    limiter: Limiter
+    limiter: FishnetLimiter
 )(implicit
     ec: scala.concurrent.ExecutionContext,
     system: akka.actor.ActorSystem
@@ -27,7 +27,11 @@ final class Analyser(
       case true => fuFalse
       case _ if !game.analysable => fuFalse
       case _ =>
-        limiter(sender, ignoreConcurrentCheck = false) flatMap { accepted =>
+        limiter(
+          sender,
+          ignoreConcurrentCheck = false,
+          ownGame = game.userIds contains sender.userId
+        ) flatMap { accepted =>
          accepted ?? {
            makeWork(game, sender) flatMap { work =>
              workQueue {
@@ -62,33 +66,34 @@ final class Analyser(
       case _ =>
         import req._
         val sender = Work.Sender(req.userId, none, mod = false, system = false)
-        (fuccess(req.unlimited) >>| limiter(sender, ignoreConcurrentCheck = true)) flatMap { accepted =>
-          if (!accepted) logger.info(s"Study request declined: ${req.studyId}/${req.chapterId} by $sender")
-          accepted ?? {
-            val work = makeWork(
-              game = Work.Game(
-                id = chapterId,
-                initialFen = initialFen,
-                studyId = studyId.some,
-                variant = variant,
-                moves = moves take maxPlies map (_.uci) mkString " "
-              ),
-              // if black moves first, use 1 as startPly so the analysis doesn't get reversed
-              startPly = initialFen.flatMap(_.color).??(_.fold(0, 1)),
-              sender = sender
-            )
-            workQueue {
-              repo getSimilarAnalysis work flatMap {
-                _.isEmpty ?? {
-                  lila.mon.fishnet.analysis.requestCount("study").increment()
-                  evalCache skipPositions work.game flatMap { skipPositions =>
-                    lila.mon.fishnet.analysis.evalCacheHits.record(skipPositions.size)
-                    repo addAnalysis work.copy(skipPositions = skipPositions)
-                  }
-                }
-              }
-            } inject accepted
-          }
+        (fuccess(req.unlimited) >>| limiter(sender, ignoreConcurrentCheck = true, ownGame = false)) flatMap {
+          accepted =>
+            if (!accepted) logger.info(s"Study request declined: ${req.studyId}/${req.chapterId} by $sender")
+            accepted ?? {
+              val work = makeWork(
+                game = Work.Game(
+                  id = chapterId,
+                  initialFen = initialFen,
+                  studyId = studyId.some,
+                  variant = variant,
+                  moves = moves take maxPlies map (_.uci) mkString " "
+                ),
+                // if black moves first, use 1 as startPly so the analysis doesn't get reversed
+                startPly = initialFen.flatMap(_.color).??(_.fold(0, 1)),
+                sender = sender
+              )
+              workQueue {
+                repo getSimilarAnalysis work flatMap {
+                  _.isEmpty ?? {
+                    lila.mon.fishnet.analysis.requestCount("study").increment()
+                    evalCache skipPositions work.game flatMap { skipPositions =>
+                      lila.mon.fishnet.analysis.evalCacheHits.record(skipPositions.size)
+                      repo addAnalysis work.copy(skipPositions = skipPositions)
+                    }
+                  }
+                }
+              } inject accepted
+            }
}
}
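
The study path above leans on >>|, lila's short-circuiting OR on Fu[Boolean] (Fu[A] is an alias for Future[A]): fuccess(req.unlimited) >>| limiter(...) consults the limiter, and therefore the daily budget, only when the request is not already unlimited. Roughly, and stripped of lila's aliases:

import scala.concurrent.{ ExecutionContext, Future }

object FuBooleanOrSketch {
  // Rough equivalent of lila's >>| : evaluate the right-hand side only when the
  // left-hand side resolves to false.
  implicit final class FuBooleanOps(private val fua: Future[Boolean]) extends AnyVal {
    def >>|(fub: => Future[Boolean])(implicit ec: ExecutionContext): Future[Boolean] =
      fua.flatMap(accepted => if (accepted) Future.successful(true) else fub)
  }

  // So an unlimited study request skips the limiter (and the new requesterApi.add
  // charge) entirely; everyone else goes through the concurrency and per-day checks.
}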

@@ -39,8 +39,7 @@ final private class AnalysisBuilder(evalCache: FishnetEvalCache)(implicit
         studyId = work.game.studyId,
         infos = makeInfos(mergeEvalsAndCached(work, evals, cached), work.game.uciList, work.startPly),
         startPly = work.startPly,
-        uid = work.sender.userId.some,
-        by = !client.lichess option client.userId.value,
+        fk = !client.lichess option client.key.value,
date = DateTime.now
)
) match {
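
fk = !client.lichess option client.key.value keeps the key only for analyses produced by external fishnet clients; work done by lichess's own built-in client is stored without one. The option here is scalalib's Boolean extension; roughly:

object BooleanOptionSketch {
  // Rough equivalent of scalalib's `cond option a`: Some(a) when the condition
  // holds, None otherwise.
  implicit final class BooleanOps(private val cond: Boolean) extends AnyVal {
    def option[A](a: => A): Option[A] = if (cond) Some(a) else None
  }

  // (!isLichessClient) option clientKey
  //   == None            for the built-in lichess client
  //   == Some(clientKey) for a third-party fishnet client (hypothetical names)
}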

@@ -79,7 +79,7 @@ final class Env(
       mk(config.movePlies)
     }
 
-  private val limiter = wire[Limiter]
+  private val limiter = wire[FishnetLimiter]
 
   lazy val analyser = wire[Analyser]

@@ -6,15 +6,17 @@ import scala.concurrent.duration._
 import lila.common.IpAddress
 import lila.db.dsl._
 
-final private class Limiter(
+final private class FishnetLimiter(
     analysisColl: Coll,
     requesterApi: lila.analyse.RequesterApi
 )(implicit ec: scala.concurrent.ExecutionContext) {
 
-  def apply(sender: Work.Sender, ignoreConcurrentCheck: Boolean): Fu[Boolean] =
+  def apply(sender: Work.Sender, ignoreConcurrentCheck: Boolean, ownGame: Boolean): Fu[Boolean] =
     (fuccess(ignoreConcurrentCheck) >>| concurrentCheck(sender)) flatMap {
       case false => fuFalse
       case true => perDayCheck(sender)
+    } flatMap { accepted =>
+      (accepted ?? requesterApi.add(sender.userId, ownGame)) inject accepted
     }
 
   private val RequestLimitPerIP = new lila.memo.RateLimit[IpAddress](