Redesign best effort compilation to not have to remove successful analyses

Also improve cleanup of directories
jchyb committed Oct 16, 2024
1 parent 46a5bd9 commit 5f822e1
Showing 5 changed files with 161 additions and 153 deletions.
42 changes: 13 additions & 29 deletions backend/src/main/scala/bloop/BloopClassFileManager.scala
@@ -219,36 +219,20 @@ final class BloopClassFileManager(
clientTracer.traceTaskVerbose("copy new products to external classes dir") { _ =>
val config =
ParallelOps.CopyConfiguration(5, CopyMode.ReplaceExisting, Set.empty, Set.empty)
val clientExternalBestEffortDir =
  clientExternalClassesDir.underlying.resolve("META-INF/best-effort")

// Deletes all previous best-effort artifacts to get rid of all of the outdated ones.
// Since best effort compilation is not affected by incremental compilation,
// all relevant files are always produced by the compiler. Because of this,
// we can always delete all previous files and copy newly created ones
// without losing anything in the process.
val deleteClientExternalBestEffortDir =
  Task {
    if (Files.exists(clientExternalBestEffortDir)) {
      BloopPaths.delete(AbsolutePath(clientExternalBestEffortDir))
    }
    ()
  }.memoize

deleteClientExternalBestEffortDir *>
  ParallelOps
    .copyDirectories(config)(
      newClassesDir,
      clientExternalClassesDir.underlying,
      inputs.ioScheduler,
      enableCancellation = false,
      inputs.logger
    )
    .map { walked =>
      readOnlyCopyDenylist.++=(walked.target)
      ()
    }
    .flatMap(_ => deleteAfterCompilation)
ParallelOps
  .copyDirectories(config)(
    newClassesDir,
    clientExternalClassesDir.underlying,
    inputs.ioScheduler,
    enableCancellation = false,
    inputs.logger
  )
  .map { walked =>
    readOnlyCopyDenylist.++=(walked.target)
    ()
  }
  .flatMap(_ => deleteAfterCompilation)
}
}
)
140 changes: 81 additions & 59 deletions backend/src/main/scala/bloop/Compiler.scala
@@ -5,7 +5,6 @@ import java.io.PrintWriter
import java.io.StringWriter
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.util.Optional
import java.util.concurrent.Executor

@@ -49,7 +48,6 @@ import xsbti.T2
import xsbti.VirtualFileRef
import xsbti.compile._


case class CompileInputs(
scalaInstance: ScalaInstance,
compilerCache: CompilerCache,
@@ -295,11 +293,6 @@ object Compiler {
)
}

val previousWasBestEffort = compileInputs.previousCompilerResult match {
case Failed(_, _, _, _, Some(BestEffortProducts(_, _, _))) => true
case _ => !firstCompilation
}

val isFatalWarningsEnabled: Boolean =
compileInputs.scalacOptions.exists(_ == "-Xfatal-warnings")
def getInputs(compilers: Compilers): Inputs = {
@@ -325,7 +318,8 @@
val compilerCache = new FreshCompilerCache
val cacheFile = compileInputs.baseDirectory.resolve("cache").toFile
val incOptions = {
val disableIncremental = java.lang.Boolean.getBoolean("bloop.zinc.disabled")
val disableIncremental =
java.lang.Boolean.getBoolean("bloop.zinc.disabled")
// Don't customize class file manager bc we pass our own to the zinc APIs directly
IncOptions.create().withEnabled(!disableIncremental)
}
@@ -354,7 +348,7 @@ object Compiler {

def cancel(): Unit = {
// Complete all pending promises when compilation is cancelled
logger.debug(s"Cancelling compilation from ${readOnlyClassesDirPath} to ${newClassesDirPath}")
logger.info(s"Cancelling compilation from ${readOnlyClassesDirPath} to ${newClassesDirPath}")
compileInputs.cancelPromise.trySuccess(())

// Always report the compilation of a project no matter if it's completed
@@ -385,6 +379,9 @@ object Compiler {
reporter.reportStartCompilation(previousProblems, wasPreviousSuccessful)
val fileManager = newFileManager

val shouldAttemptRestartingCompilationForBestEffort =
firstCompilation && !isBestEffortDep && previousAnalysis.isDefined

// Manually skip redundant best-effort compilations. This is necessary because compiler
// phases supplying the data needed to skip compilations in zinc remain unimplemented for now.
val noopBestEffortResult = compileInputs.previousCompilerResult match {
@@ -470,7 +467,8 @@
fileManager,
cancelPromise,
tracer,
classpathOptions
classpathOptions,
!(isBestEffortMode && isBestEffortDep)
)
.materialize
.doOnCancel(Task(cancel()))
@@ -483,11 +481,9 @@
() => elapsed,
reporter,
backgroundTasksWhenNewSuccessfulAnalysis,
allInvalidatedClassFilesForProject,
allInvalidatedExtraCompileProducts,
previousSuccessfulProblems,
errorCause = None,
previousWasBestEffort
shouldAttemptRestartingCompilationForBestEffort
)
case Success(result) =>
// Report end of compilation only after we have reported all warnings from previous runs
@@ -553,16 +549,25 @@
val clientClassesDir = clientClassesObserver.classesDir
clientLogger.debug(s"Triggering background tasks for $clientClassesDir")
val updateClientState =
updateExternalClassesDirWithReadOnly(
clientClassesDir,
clientTracer,
clientLogger,
compileInputs,
readOnlyClassesDir,
readOnlyCopyDenylist,
allInvalidatedClassFilesForProject,
allInvalidatedExtraCompileProducts
)
Task
.gatherUnordered(
List(
deleteClientExternalBestEffortDirTask(clientClassesDir),
deleteBestEffortDir()
)
)
.flatMap { _ =>
updateExternalClassesDirWithReadOnly(
clientClassesDir,
clientTracer,
clientLogger,
compileInputs,
readOnlyClassesDir,
readOnlyCopyDenylist,
allInvalidatedClassFilesForProject,
allInvalidatedExtraCompileProducts
)
}

val writeAnalysisIfMissing = {
if (compileOut.analysisOut.exists) Task.unit
@@ -586,10 +591,9 @@
}
.flatMap(clientClassesObserver.nextAnalysis)

deleteBestEffortDir() *> Task
Task
.gatherUnordered(
List(
deleteBestEffortDir,
deleteNewClassesDir,
updateClientState,
writeAnalysisIfMissing,
@@ -598,7 +602,8 @@
)
.flatMap(_ => publishClientAnalysis)
.onErrorHandleWith(err => {
clientLogger.debug("Caught error in background tasks"); clientLogger.trace(err);
clientLogger.debug("Caught error in background tasks");
clientLogger.trace(err);
Task.raiseError(err)
})
.doOnFinish(_ => Task(clientReporter.reportEndCompilation()))
@@ -642,10 +647,19 @@
): Task[Unit] = {
val clientClassesDir = clientClassesObserver.classesDir
val successBackgroundTasks =
deleteBestEffortDir() *> Task.gatherUnordered(
backgroundTasksWhenNewSuccessfulAnalysis
.map(f => f(clientClassesDir, clientReporter, clientTracer))
)
Task
.gatherUnordered(
List(
deleteBestEffortDir(),
deleteClientExternalBestEffortDirTask(clientClassesDir)
)
)
.flatMap { _ =>
Task.gatherUnordered(
backgroundTasksWhenNewSuccessfulAnalysis
.map(f => f(clientClassesDir, clientReporter, clientTracer))
)
}
val persistTask =
persistAnalysis(analysisForFutureCompilationRuns, compileOut.analysisOut)
val initialTasks = List(persistTask, successBackgroundTasks)
@@ -724,11 +738,9 @@
() => elapsed,
reporter,
backgroundTasksWhenNewSuccessfulAnalysis,
allInvalidatedClassFilesForProject,
allInvalidatedExtraCompileProducts,
previousSuccessfulProblems,
errorCause = Some(cause),
previousWasBestEffort
shouldAttemptRestartingCompilationForBestEffort
)

case Failure(_: xsbti.CompileCancelled) => handleCancellation
@@ -747,7 +759,7 @@
t.printStackTrace()
val sw = new StringWriter()
t.printStackTrace(new PrintWriter(sw))
logger.info(sw.toString())
logger.error(sw.toString())
val backgroundTasks =
toBackgroundTasks(backgroundTasksForFailedCompilation.toList)
val failedProblems = findFailedProblems(reporter, None)
@@ -954,14 +966,11 @@
elapsed: () => Long,
reporter: ZincReporter,
backgroundTasksWhenNewSuccessfulAnalysis: mutable.ListBuffer[CompileBackgroundTasks.Sig],
allInvalidatedClassFilesForProject: mutable.HashSet[File],
allInvalidatedExtraCompileProducts: mutable.HashSet[File],
previousSuccessfulProblems: List[ProblemPerPhase],
errorCause: Option[xsbti.CompileFailed],
previousWasBestEffort: Boolean
shouldAttemptRestartingCompilation: Boolean
): Result = {
val uniqueInputs = compileInputs.uniqueInputs
val readOnlyClassesDir = compileOut.internalReadOnlyClassesDir.underlying
val newClassesDir = compileOut.internalNewClassesDir.underlying

reporter.processEndCompilation(
@@ -978,7 +987,7 @@
)

val products = CompileProducts(
readOnlyClassesDir,
newClassesDir, // let's not use readonly dir
newClassesDir,
noOpPreviousResult,
noOpPreviousResult,
@@ -995,18 +1004,25 @@
): Task[Unit] = {
val clientClassesDir = clientClassesObserver.classesDir
val successBackgroundTasks =
backgroundTasksWhenNewSuccessfulAnalysis
.map(f => f(clientClassesDir, clientReporter, clientTracer))
val allClientSyncTasks = Task.gatherUnordered(successBackgroundTasks.toList).flatMap { _ =>
deleteClientExternalBestEffortDirTask(clientClassesDir).flatMap { _ =>
Task.gatherUnordered(
backgroundTasksWhenNewSuccessfulAnalysis
.map(f => f(clientClassesDir, clientReporter, clientTracer))
)
}
val allClientSyncTasks = successBackgroundTasks.flatMap { _ =>
// Only start this task after the previous IO tasks in the external dir are done
Task {
// Delete everything outside of betasty and semanticdb
val deletedCompileProducts =
BloopClassFileManager.supportedCompileProducts.filter(_ != ".betasty") :+ ".class"
Files
.walk(clientClassesDir.underlying)
.filter(path => if (Files.exists(path)) Files.isRegularFile(path) else false)
.filter(path => deletedCompileProducts.exists(path.toString.endsWith(_)))
.filter(path =>
Files.isRegularFile(path) && deletedCompileProducts.exists(
path.toString.endsWith(_)
)
)
.forEach(path => if (Files.exists(path)) Files.delete(path))
}.map(_ => ())
}
@@ -1015,22 +1031,12 @@
}
}

val recompile =
if (
!previousWasBestEffort && !(compileOut.internalReadOnlyClassesDir.exists && BloopPaths
.list(compileOut.internalReadOnlyClassesDir)
.length == 0)
) {
if (compileOut.analysisOut.exists) BloopPaths.delete(compileOut.analysisOut)
BloopPaths.delete(compileOut.internalReadOnlyClassesDir)
Files.createDirectories(Paths.get(compileOut.internalReadOnlyClassesDir.toString))
BloopPaths.delete(compileOut.internalNewClassesDir)
Files.createDirectories(Paths.get(compileOut.internalNewClassesDir.toString))
true
} else false
if (shouldAttemptRestartingCompilation) {
BloopPaths.delete(compileOut.internalNewClassesDir)
}

val newHash =
if (previousWasBestEffort)
if (!shouldAttemptRestartingCompilation)
BestEffortUtils.hashResult(
products.newClassesDir,
compileInputs.sources,
@@ -1043,7 +1049,7 @@
None,
elapsed(),
backgroundTasksExecution,
Some(BestEffortProducts(products, newHash, recompile))
Some(BestEffortProducts(products, newHash, shouldAttemptRestartingCompilation))
)
}

@@ -1211,4 +1217,20 @@
}
}
}

// Deletes all previous best-effort artifacts to get rid of all of the outdated ones.
// Since best effort compilation is not affected by incremental compilation,
// all relevant files are always produced by the compiler. Because of this,
// we can always delete all previous files and copy newly created ones
// without losing anything in the process.
def deleteClientExternalBestEffortDirTask(clientClassesDir: AbsolutePath) = {
val clientExternalBestEffortDir =
clientClassesDir.underlying.resolve("META-INF/best-effort")
Task {
if (Files.exists(clientExternalBestEffortDir)) {
BloopPaths.delete(AbsolutePath(clientExternalBestEffortDir))
}
()
}.memoize
}
}
backend/src/main/scala/bloop/BloopZincCompiler.scala
@@ -58,7 +58,8 @@ object BloopZincCompiler {
manager: ClassFileManager,
cancelPromise: Promise[Unit],
tracer: BraveTracer,
classpathOptions: ClasspathOptions
classpathOptions: ClasspathOptions,
withPreviousResult: Boolean
): Task[CompileResult] = {
val config = in.options()
val setup = in.setup()
@@ -81,8 +82,8 @@
scalacOptions,
javacOptions,
classpathOptions,
in.previousResult.analysis.toOption,
in.previousResult.setup.toOption,
if (withPreviousResult) in.previousResult.analysis.toOption else None,
if (withPreviousResult) in.previousResult.setup.toOption else None,
perClasspathEntryLookup,
reporter,
order,
Expand Down
25 changes: 25 additions & 0 deletions docs/contributing-guide.md
@@ -242,3 +242,28 @@ indices.
- Push the new tag and wait for the release
- Announce the release after the release notes are published in the most recent
release.


# Best effort compilation pipeline

At the time of writing, this is an experimental set of settings implemented in the Scala 3
compiler (starting with 3.5.x). They allow the compiler to return artifacts even when
compilation fails, producing `.betasty` files instead of `.class` and `.tasty` files. Best
effort compilation also does not yet support incremental compilation. All of this requires
special handling in the build tool, located mostly in `Compiler.scala`, `CompileTask.scala`
and `CompileGraph.scala`:
- We store best effort artifacts separately and allow dependent projects to compile against
  them even when compilation has failed. If the project later compiles successfully, we
  discard the best effort artifacts.
- First, we try to compile incrementally (only the modified files), expecting a regular
  successful compilation.
- If that fails at any point, we discard the intermediate results and recompile the whole
  module, expecting `.betasty` files. We never move those to the readOnly directory, and that
  readOnly directory is not used in dependent compilations either.
- We skip the incremental attempt if we know up front that we are compiling the whole module
  anyway (e.g. because we depend on `.betasty` from a different project, or because this is
  the first compilation and no previous incremental compilation analysis exists).
- When recompiling a module that previously produced `.betasty`, we again attempt up to two
  compilations: first an incremental one expecting success (recompiling all files changed
  since the last successful compilation, as recorded in the incremental compilation analysis),
  and then a full recompilation. This works out to be faster than discarding the last
  successful result and bouncing between full successful recompilations and full best effort
  recompilations. A sketch of this retry strategy follows this list.
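
For illustration, here is a minimal sketch of the retry strategy described above. This is not
Bloop's actual implementation: the `Outcome` type and the parameters (`compileIncrementally`,
`compileAllToBetasty`, `dependsOnBetasty`, `hasPreviousAnalysis`) are hypothetical placeholders,
and only the control flow mirrors the description.

```scala
object BestEffortRetrySketch {

  sealed trait Outcome
  case object FullSuccess extends Outcome // regular .class/.tasty artifacts produced
  case object BetastyFallback extends Outcome // only .betasty artifacts produced

  /**
   * @param compileIncrementally recompiles only the files changed since the last
   *                             successful compilation; returns true on success
   * @param compileAllToBetasty  recompiles the whole module, keeping best effort output
   * @param dependsOnBetasty     some dependency only provided .betasty artifacts
   * @param hasPreviousAnalysis  an incremental analysis from an earlier success exists
   */
  def compile(
      compileIncrementally: () => Boolean,
      compileAllToBetasty: () => Unit,
      dependsOnBetasty: Boolean,
      hasPreviousAnalysis: Boolean
  ): Outcome = {
    // Skip the incremental attempt when the whole module must be compiled anyway.
    val tryIncrementalFirst = !dependsOnBetasty && hasPreviousAnalysis
    if (tryIncrementalFirst && compileIncrementally()) FullSuccess
    else {
      // The incremental attempt failed or was never applicable: discard its results
      // and recompile everything, accepting .betasty output for dependent projects.
      compileAllToBetasty()
      BetastyFallback
    }
  }
}
```

The real logic in `Compiler.scala` additionally cleans up the `META-INF/best-effort`
directories and hashes the best effort outputs so that redundant best effort recompilations
can be skipped.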