Commit

Merge pull request #443 from scalacenter/topic/load-io-executor

Speed up first bloop server load

jvican authored Apr 16, 2018
2 parents a235320 + 991c4b4 commit 24be78b

Showing 2 changed files with 50 additions and 39 deletions.
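The core of the change: every blocking load (reading a project config file, reading a previous compile analysis) is wrapped in a Monix Task, the Tasks are gathered unordered so they can run in parallel, and the whole batch is shifted onto a dedicated I/O scheduler instead of the default pool. A minimal sketch of that pattern, assuming a Scheduler.io-backed pool and an illustrative parse function standing in for Project.fromFile (neither is bloop's actual code):

    import java.nio.file.{Files, Path}
    import monix.eval.Task
    import monix.execution.Scheduler

    object LoadSketch {
      // Stand-in for bloop.engine.ExecutionContext.ioScheduler: an unbounded
      // cached pool intended for blocking disk reads.
      val ioScheduler: Scheduler = Scheduler.io(name = "sketch-io")

      // Illustrative blocking parser standing in for Project.fromFile.
      def parse(path: Path): String = new String(Files.readAllBytes(path))

      def loadAll(paths: List[Path]): Task[List[String]] = {
        val tasks = paths.map(p => Task(parse(p))) // defer each blocking read
        Task.gatherUnordered(tasks).executeOn(ioScheduler) // run the batch on the I/O pool
      }
    }

Running the blocking loads on a separate I/O scheduler keeps the default pool free during startup, which is the intent behind the commit title.
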
frontend/src/main/scala/bloop/Project.scala (13 changes: 6 additions & 7 deletions)
@@ -8,6 +8,7 @@ import xsbti.compile.ClasspathOptions
import _root_.monix.eval.Task
import bloop.bsp.ProjectUris
import bloop.config.{Config, ConfigDecoders}
import bloop.engine.ExecutionContext
import metaconfig.{Conf, Configured}
import org.langmeta.inputs.Input

@@ -58,13 +59,11 @@ object Project {
* @return The list of loaded projects.
*/
def lazyLoadFromDir(configRoot: AbsolutePath, logger: Logger): Task[List[Project]] = {
timed(logger) {
// TODO: We're not handling projects with duplicated names here.
val configFiles = loadAllFiles(configRoot)
logger.debug(s"Loading ${configFiles.length} projects from '${configRoot.syntax}'...")
val all = configFiles.iterator.map(configFile => Task(fromFile(configFile, logger))).toList
Task.gatherUnordered(all)
}
// TODO: We're not handling projects with duplicated names here.
val configFiles = loadAllFiles(configRoot)
logger.debug(s"Loading ${configFiles.length} projects from '${configRoot.syntax}'...")
val all = configFiles.iterator.map(configFile => Task(fromFile(configFile, logger))).toList
Task.gatherUnordered(all).executeOn(ExecutionContext.ioScheduler)
}

/**
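In the new lazyLoadFromDir each config file is parsed inside its own Task and the gathered batch is executed on the I/O scheduler; the timed(logger) wrapper around the whole load is dropped. A caller that still needs the projects synchronously can block on the returned Task, mirroring what ResultsCache.load does below. A hedged sketch (runBlocking and the scheduler name are illustrative, not bloop API):

    import monix.eval.Task
    import monix.execution.Scheduler
    import scala.concurrent.Await
    import scala.concurrent.duration.Duration

    object RunSketch {
      val ioScheduler: Scheduler = Scheduler.io(name = "sketch-io")

      // Block the current thread until the task, run on the I/O pool, completes.
      def runBlocking[A](task: Task[A]): A =
        Await.result(task.runAsync(ioScheduler), Duration.Inf)
    }
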
frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala (76 changes: 44 additions & 32 deletions)
@@ -4,12 +4,16 @@ import java.util.Optional

import bloop.{Compiler, Project}
import bloop.Compiler.Result
import bloop.engine.Build
import bloop.engine.{Build, ExecutionContext}
import bloop.io.AbsolutePath
import bloop.logging.Logger
import bloop.reporter.Reporter
import monix.eval.Task
import xsbti.compile.{CompileAnalysis, MiniSetup, PreviousResult}

import scala.concurrent.Await
import scala.concurrent.duration.Duration

/**
* Maps projects to compilation results, populated by `Tasks.compile`.
*
@@ -63,35 +67,6 @@ final class ResultsCache private (
def addResults(ps: List[(Project, Compiler.Result)]): ResultsCache =
ps.foldLeft(this) { case (rs, (p, r)) => rs.addResult(p, r) }

private def initializeResult(project: Project, cwd: AbsolutePath): ResultsCache = {
import java.nio.file.Files
import sbt.internal.inc.FileAnalysisStore
import bloop.util.JavaCompat.EnrichOptional

def fetchPreviousResult(p: Project): Compiler.Result = {
val analysisFile = ResultsCache.pathToAnalysis(p)
if (Files.exists(analysisFile.underlying)) {
val contents = FileAnalysisStore.binary(analysisFile.toFile).get().toOption
contents match {
case Some(res) =>
logger.debug(s"Loading previous analysis for '${project.name}' from '$analysisFile'.")
val p = PreviousResult.of(Optional.of(res.getAnalysis), Optional.of(res.getMiniSetup))
val reporter = Reporter.fromAnalysis(res.getAnalysis, cwd, logger)
Result.Success(reporter, p, 0L)
case None =>
logger.debug(s"Analysis '$analysisFile' for '${project.name}' is empty.")
Result.Empty
}
} else {
logger.debug(s"Missing analysis file for project '${project.name}'")
Result.Empty
}
}

if (all.contains(project)) this
else addResult(project, fetchPreviousResult(project))
}

override def toString: String = s"ResultsCache(${successful.mkString(", ")})"
}

@@ -105,8 +80,45 @@ object ResultsCache {
PreviousResult.of(Optional.empty[CompileAnalysis], Optional.empty[MiniSetup])

def load(build: Build, cwd: AbsolutePath, logger: Logger): ResultsCache = {
build.projects.foldLeft(new ResultsCache(Map.empty, Map.empty, logger)) {
case (results, project) => results.initializeResult(project, cwd)
val handle = loadAsync(build, cwd, logger).runAsync(ExecutionContext.ioScheduler)
Await.result(handle, Duration.Inf)
}

def loadAsync(build: Build, cwd: AbsolutePath, logger: Logger): Task[ResultsCache] = {
import java.nio.file.Files
import sbt.internal.inc.FileAnalysisStore
import bloop.util.JavaCompat.EnrichOptional

def fetchPreviousResult(p: Project): Task[Compiler.Result] = {
val analysisFile = ResultsCache.pathToAnalysis(p)
if (Files.exists(analysisFile.underlying)) {
Task {
val contents = FileAnalysisStore.binary(analysisFile.toFile).get().toOption
contents match {
case Some(res) =>
logger.debug(s"Loading previous analysis for '${p.name}' from '$analysisFile'.")
val r = PreviousResult.of(Optional.of(res.getAnalysis), Optional.of(res.getMiniSetup))
val reporter = Reporter.fromAnalysis(res.getAnalysis, cwd, logger)
Result.Success(reporter, r, 0L)
case None =>
logger.debug(s"Analysis '$analysisFile' for '${p.name}' is empty.")
Result.Empty
}

}
} else {
Task.now {
logger.debug(s"Missing analysis file for project '${p.name}'")
Result.Empty
}
}
}

val all = build.projects.map(p => fetchPreviousResult(p).map(r => p -> r))
Task.gatherUnordered(all).executeOn(ExecutionContext.ioScheduler).map { projectResults =>
val cache = new ResultsCache(Map.empty, Map.empty, logger)
cache.addResults(projectResults)
cache
}
}

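Both files import bloop.engine.ExecutionContext for its ioScheduler, whose definition is not part of this diff. A plausible minimal shape, assuming it is a Monix Scheduler backed by an unbounded cached thread pool for blocking I/O (an assumption, not the actual bloop source):

    package bloop.engine

    import monix.execution.Scheduler

    object ExecutionContext {
      // Assumed definition: an unbounded pool suited to blocking reads of
      // project configs and analysis files.
      val ioScheduler: Scheduler = Scheduler.io(name = "bloop-io")
    }

With that in place, load simply runs loadAsync on the I/O pool and awaits the result, while loadAsync gathers one Task per project, each reading that project's analysis file off the calling thread.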
