From 0b291b3a000772e8a4f84ab44674b010b07a294c Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 26 Aug 2024 13:28:31 +0200 Subject: [PATCH 1/6] Remove betasty directory on successful compilation backgroundTasks --- backend/src/main/scala/bloop/Compiler.scala | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index 759990d77..1f7adab25 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -500,6 +500,19 @@ object Compiler { Task(persist(out, analysis, result.setup, tracer, logger)).memoize } + // betasty files are always produced with -Ybest-effort, even when + // the compilation is successful. + // We might want to change this in the commpiler itself... + // Alternatively, whether downstream projects use betasty can be + // controlled with -Ywith-best-effort-tasty + val deleteBestEffortDir = + if (isBestEffortMode) + Task( + BloopPaths + .delete(compileOut.internalReadOnlyClassesDir.resolve("META-INF/best-effort")) + ) + else Task {} + val isNoOp = previousAnalysis.contains(analysis) if (isNoOp) { // If no-op, return previous result with updated classpath hashes @@ -564,6 +577,7 @@ object Compiler { Task .gatherUnordered( List( + deleteBestEffortDir, deleteNewClassesDir, updateClientState, writeAnalysisIfMissing, @@ -658,7 +672,7 @@ object Compiler { ) }.flatMap(clientClassesObserver.nextAnalysis) Task - .gatherUnordered(List(firstTask, secondTask)) + .gatherUnordered(List(deleteBestEffortDir, firstTask, secondTask)) .flatMap(_ => publishClientAnalysis) } From 31e62beb9fcc184e088db1df312f1099abbebf8d Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Fri, 30 Aug 2024 18:21:53 +0200 Subject: [PATCH 2/6] Delete betasty in newClassesDir, completely restart BE compilations after an error --- backend/src/main/scala/bloop/Compiler.scala | 98 +++++++++++-------- .../scala/bloop/util/BestEffortUtils.scala | 6 +- .../bloop/engine/tasks/CompileTask.scala | 76 +++++++++++++- .../tasks/compilation/CompileGraph.scala | 2 +- .../scala/bloop/bsp/BspMetalsClientSpec.scala | 9 +- 5 files changed, 140 insertions(+), 51 deletions(-) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index 1f7adab25..34f13d22f 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -46,6 +46,7 @@ import xsbti.T2 import xsbti.VirtualFileRef import xsbti.compile._ + case class CompileInputs( scalaInstance: ScalaInstance, compilerCache: CompilerCache, @@ -249,7 +250,8 @@ object Compiler { def compile( compileInputs: CompileInputs, isBestEffortMode: Boolean, - isBestEffortDep: Boolean + isBestEffortDep: Boolean, + firstCompilation: Boolean ): Task[Result] = { val logger = compileInputs.logger val tracer = compileInputs.tracer @@ -290,6 +292,11 @@ object Compiler { ) } + val previousWasBestEffort = compileInputs.previousCompilerResult match { + case Failed(_, _, _, _, Some(BestEffortProducts(_, _, _))) => true + case _ => !firstCompilation + } + val isFatalWarningsEnabled: Boolean = compileInputs.scalacOptions.exists(_ == "-Xfatal-warnings") def getInputs(compilers: Compilers): Inputs = { @@ -383,7 +390,9 @@ object Compiler { t, elapsed, _, - bestEffortProducts @ Some(BestEffortProducts(previousCompilationResults, previousHash)) + bestEffortProducts @ Some( + BestEffortProducts(previousCompilationResults, previousHash, _) + ) ) if isBestEffortMode => val 
newHash = BestEffortUtils.hashResult( previousCompilationResults.newClassesDir, @@ -474,7 +483,8 @@ object Compiler { allInvalidatedClassFilesForProject, allInvalidatedExtraCompileProducts, previousSuccessfulProblems, - None + errorCause = None, + previousWasBestEffort ) case Success(result) => // Report end of compilation only after we have reported all warnings from previous runs @@ -500,16 +510,14 @@ object Compiler { Task(persist(out, analysis, result.setup, tracer, logger)).memoize } - // betasty files are always produced with -Ybest-effort, even when + // .betasty files are always produced with -Ybest-effort, even when // the compilation is successful. - // We might want to change this in the commpiler itself... - // Alternatively, whether downstream projects use betasty can be - // controlled with -Ywith-best-effort-tasty - val deleteBestEffortDir = + // We might want to change this in the compiler itself... + def deleteBestEffortDir() = if (isBestEffortMode) Task( BloopPaths - .delete(compileOut.internalReadOnlyClassesDir.resolve("META-INF/best-effort")) + .delete(compileOut.internalNewClassesDir.resolve("META-INF/best-effort")) ) else Task {} @@ -574,7 +582,8 @@ object Compiler { ) } .flatMap(clientClassesObserver.nextAnalysis) - Task + + deleteBestEffortDir() *> Task .gatherUnordered( List( deleteBestEffortDir, @@ -630,11 +639,13 @@ object Compiler { ): Task[Unit] = { val clientClassesDir = clientClassesObserver.classesDir val successBackgroundTasks = - backgroundTasksWhenNewSuccessfulAnalysis - .map(f => f(clientClassesDir, clientReporter, clientTracer)) + deleteBestEffortDir() *> Task.gatherUnordered( + backgroundTasksWhenNewSuccessfulAnalysis + .map(f => f(clientClassesDir, clientReporter, clientTracer)) + ) val persistTask = persistAnalysis(analysisForFutureCompilationRuns, compileOut.analysisOut) - val initialTasks = persistTask :: successBackgroundTasks.toList + val initialTasks = List(persistTask, successBackgroundTasks) val allClientSyncTasks = Task.gatherUnordered(initialTasks).flatMap { _ => // Only start these tasks after the previous IO tasks in the external dir are done val firstTask = updateExternalClassesDirWithReadOnly( @@ -672,7 +683,7 @@ object Compiler { ) }.flatMap(clientClassesObserver.nextAnalysis) Task - .gatherUnordered(List(deleteBestEffortDir, firstTask, secondTask)) + .gatherUnordered(List(firstTask, secondTask)) .flatMap(_ => publishClientAnalysis) } @@ -713,7 +724,8 @@ object Compiler { allInvalidatedClassFilesForProject, allInvalidatedExtraCompileProducts, previousSuccessfulProblems, - Some(cause) + errorCause = Some(cause), + previousWasBestEffort ) case Failure(_: xsbti.CompileCancelled) => handleCancellation @@ -938,7 +950,8 @@ object Compiler { allInvalidatedClassFilesForProject: mutable.HashSet[File], allInvalidatedExtraCompileProducts: mutable.HashSet[File], previousSuccessfulProblems: List[ProblemPerPhase], - errorCause: Option[xsbti.CompileFailed] + errorCause: Option[xsbti.CompileFailed], + previousWasBestEffort: Boolean ): Result = { val uniqueInputs = compileInputs.uniqueInputs val readOnlyClassesDir = compileOut.internalReadOnlyClassesDir.underlying @@ -978,49 +991,52 @@ object Compiler { backgroundTasksWhenNewSuccessfulAnalysis .map(f => f(clientClassesDir, clientReporter, clientTracer)) val allClientSyncTasks = Task.gatherUnordered(successBackgroundTasks.toList).flatMap { _ => - // Only start these tasks after the previous IO tasks in the external dir are done - val firstTask = updateExternalClassesDirWithReadOnly( - clientClassesDir, - 
clientTracer, - clientLogger, - compileInputs, - readOnlyClassesDir, - readOnlyCopyDenylist = mutable.HashSet.empty, - allInvalidatedClassFilesForProject, - allInvalidatedExtraCompileProducts - ) - - val secondTask = Task { + // Only start this task after the previous IO tasks in the external dir are done + Task { // Delete everything outside of betasty and semanticdb val deletedCompileProducts = BloopClassFileManager.supportedCompileProducts.filter(_ != ".betasty") :+ ".class" Files .walk(clientClassesDir.underlying) - .filter(path => Files.isRegularFile(path)) + .filter(path => if (Files.exists(path)) Files.isRegularFile(path) else false) .filter(path => deletedCompileProducts.exists(path.toString.endsWith(_))) - .forEach(Files.delete(_)) - } - Task - .gatherUnordered(List(firstTask, secondTask)) - .map(_ => ()) + .forEach(path => if (Files.exists(path)) Files.delete(path)) + }.map(_ => ()) } allClientSyncTasks.doOnFinish(_ => Task(clientReporter.reportEndCompilation())) } } - val newHash = BestEffortUtils.hashResult( - products.newClassesDir, - compileInputs.sources, - compileInputs.classpath - ) + val recompile = + if ( + !previousWasBestEffort && !(compileOut.internalReadOnlyClassesDir.exists && BloopPaths + .list(compileOut.internalReadOnlyClassesDir) + .length == 0) + ) { + if (compileOut.analysisOut.exists) BloopPaths.delete(compileOut.analysisOut) + BloopPaths.delete(compileOut.internalReadOnlyClassesDir) + Files.createDirectories(Paths.get(compileOut.internalReadOnlyClassesDir.toString)) + BloopPaths.delete(compileOut.internalNewClassesDir) + Files.createDirectories(Paths.get(compileOut.internalNewClassesDir.toString)) + true + } else false + + val newHash = + if (previousWasBestEffort) + BestEffortUtils.hashResult( + products.newClassesDir, + compileInputs.sources, + compileInputs.classpath + ) + else "" val failedProblems = findFailedProblems(reporter, errorCause) Result.Failed( failedProblems, None, elapsed(), backgroundTasksExecution, - Some(BestEffortProducts(products, newHash)) + Some(BestEffortProducts(products, newHash, recompile)) ) } diff --git a/backend/src/main/scala/bloop/util/BestEffortUtils.scala b/backend/src/main/scala/bloop/util/BestEffortUtils.scala index 318507fe0..f29cdb6df 100644 --- a/backend/src/main/scala/bloop/util/BestEffortUtils.scala +++ b/backend/src/main/scala/bloop/util/BestEffortUtils.scala @@ -10,7 +10,11 @@ import bloop.io.AbsolutePath object BestEffortUtils { - case class BestEffortProducts(compileProducts: bloop.CompileProducts, hash: String) + case class BestEffortProducts( + compileProducts: bloop.CompileProducts, + hash: String, + recompile: Boolean + ) /* Hashes results of a projects compilation, to mimic how it would have been handled in zinc. * Returns SHA-1 of a project. 
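For readers new to this code path, the sketch below illustrates the kind of result hash the comment above describes: digest the new classes directory plus the paths and contents of the sources and classpath entries into a SHA-1 hex string, so that an unchanged best-effort compilation can be recognized and skipped. The object name and the simplified Path/Seq types are assumptions made for the sketch; this is not the actual BestEffortUtils.hashResult implementation.

import java.math.BigInteger
import java.nio.file.{Files, Path}
import java.security.MessageDigest

// Illustrative only: a minimal stand-in for BestEffortUtils.hashResult.
object BestEffortHashSketch {
  def hashResult(newClassesDir: Path, sources: Seq[Path], classpath: Seq[Path]): String = {
    val digest = MessageDigest.getInstance("SHA-1")
    // Mix in both the path and, for regular files, the content of every input.
    def add(path: Path): Unit = {
      digest.update(path.toString.getBytes("UTF-8"))
      if (Files.isRegularFile(path)) digest.update(Files.readAllBytes(path))
    }
    (sources ++ classpath).foreach(add)
    digest.update(newClassesDir.toString.getBytes("UTF-8"))
    // Render the 20-byte SHA-1 digest as a 40-character hex string.
    String.format("%040x", new BigInteger(1, digest.digest()))
  }
}
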
diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index 0366d8829..1f8042d85 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -39,6 +39,11 @@ import bloop.util.BestEffortUtils.BestEffortProducts import monix.execution.CancelableFuture import monix.reactive.MulticastStrategy import monix.reactive.Observable +import sbt.internal.inc.BloopComponentCompiler +import xsbti.compile.PreviousResult +import java.util.Optional +import xsbti.compile.MiniSetup +import xsbti.compile.CompileAnalysis object CompileTask { private implicit val logContext: DebugFilter = DebugFilter.Compilation @@ -171,9 +176,72 @@ object CompileTask { // Block on the task associated with this result that sets up the read-only classes dir waitOnReadClassesDir.flatMap { _ => + def getAllSourceInputs(project: Project): List[AbsolutePath] = { + import java.nio.file.Files + import scala.collection.JavaConverters._ + + val uniqueSourceDirs = project.sources + + val sourceExts = Seq(".scala", ".java") + val unmanagedSources: mutable.Set[AbsolutePath] = mutable.Set() + + uniqueSourceDirs.map(_.underlying).foreach { file => + if (!Files.exists(file)) () + else if ( + Files.isRegularFile(file) && sourceExts.exists(ext => file.toString.endsWith(ext)) + ) { + unmanagedSources.add(AbsolutePath(file)) + } else if (Files.isDirectory(file)) { + Files.walk(file).iterator().asScala.foreach { file => + if ( + Files.isRegularFile(file) && sourceExts + .exists(ext => file.toString.endsWith(ext)) + ) { + unmanagedSources.add(AbsolutePath(file)) + } + } + } + } + + project.sourcesGlobs.foreach { glob => + Files.walk(glob.directory.underlying).iterator().asScala.foreach { file => + if ( + Files.isRegularFile(file) && sourceExts + .exists(ext => file.toString.endsWith(ext)) && glob.matches(file) + ) { + unmanagedSources.add(AbsolutePath(file)) + } + } + } + + unmanagedSources.toList + } + // Only when the task is finished, we kickstart the compilation - def compile(inputs: CompileInputs) = - Compiler.compile(inputs, isBestEffort, isBestEffortDep) + def compile(inputs: CompileInputs) = { + val firstResult = Compiler.compile(inputs, isBestEffort, isBestEffortDep, true) + firstResult.flatMap { + case result @ Compiler.Result.Failed( + _, + _, + _, + _, + Some(BestEffortProducts(_, _, recompile)) + ) if recompile => + // we restart the compilation, starting from scratch (without any previous artifacts) + inputs.reporter.reset() + val foundSrcs = getAllSourceInputs(project) + val emptyResult = + PreviousResult.of(Optional.empty[CompileAnalysis], Optional.empty[MiniSetup]) + val newInputs = inputs.copy( + sources = foundSrcs.toArray, + previousCompilerResult = result, + previousResult = emptyResult + ) + Compiler.compile(newInputs, isBestEffort, isBestEffortDep, false) + case result => Task(result) + } + } inputs.flatMap(inputs => compile(inputs)).map { result => def runPostCompilationTasks( backgroundTasks: CompileBackgroundTasks @@ -467,10 +535,10 @@ object CompileTask { logger.debug(s"Scheduling to delete ${previousClassesDir} superseded by $newClassesDir") Some(previousClassesDir) } - case Failed(_, _, _, _, Some(BestEffortProducts(products, _))) => + case Failed(_, _, _, _, Some(BestEffortProducts(products, _, _))) => val newClassesDir = products.newClassesDir previousResult match { - case Some(Failed(_, _, _, _, Some(BestEffortProducts(previousProducts, _)))) => + case 
Some(Failed(_, _, _, _, Some(BestEffortProducts(previousProducts, _, _)))) => val previousClassesDir = previousProducts.newClassesDir if (previousClassesDir != newClassesDir) { logger.debug( diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala index 8002ead70..6268ab830 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala @@ -453,7 +453,7 @@ object CompileGraph { .+=(newProducts.readOnlyClassesDir.toFile -> newResult) case (p, ResultBundle(f: Compiler.Result.Failed, _, _, _)) => f.bestEffortProducts.foreach { - case BestEffortProducts(products, _) => + case BestEffortProducts(products, _, _) => dependentProducts += (p -> Right(products)) } case _ => () diff --git a/frontend/src/test/scala/bloop/bsp/BspMetalsClientSpec.scala b/frontend/src/test/scala/bloop/bsp/BspMetalsClientSpec.scala index 8f28bce51..955f687cb 100644 --- a/frontend/src/test/scala/bloop/bsp/BspMetalsClientSpec.scala +++ b/frontend/src/test/scala/bloop/bsp/BspMetalsClientSpec.scala @@ -665,8 +665,9 @@ class BspMetalsClientSpec( ) loadBspState(workspace, projects, logger, "Metals", bloopExtraParams = extraParams) { state => val compiledState = state.compile(`A`, arguments = Some(List("--best-effort"))).toTestState - assertBetastyFile("A.betasty", compiledState, "A") - assertBetastyFile("B.betasty", compiledState, "A") + // we remove betasty from successful compilations + assertNoBetastyFile("A.betasty", compiledState, "A") + assertNoBetastyFile("B.betasty", compiledState, "A") assertCompilationFile("A.class", compiledState, "A") updateProject(updatedFile1WithError) val compiledState2 = state.compile(`A`, arguments = Some(List("--best-effort"))).toTestState @@ -677,8 +678,8 @@ class BspMetalsClientSpec( updateProject(updatedFile2WithoutError) val compiledState3 = state.compile(`A`, arguments = Some(List("--best-effort"))).toTestState assertNoBetastyFile("A.betasty", compiledState3, "A") - assertBetastyFile("B.betasty", compiledState3, "A") - assertBetastyFile("C.betasty", compiledState3, "A") + assertNoBetastyFile("B.betasty", compiledState3, "A") + assertNoBetastyFile("C.betasty", compiledState3, "A") assertCompilationFile("B.class", compiledState, "A") updateProject(updatedFile3WithError) val compiledState4 = state.compile(`A`, arguments = Some(List("--best-effort"))).toTestState From 251158a9aecb13dc1cde9ba6d64dbcdef52b4842 Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 9 Sep 2024 15:16:42 +0200 Subject: [PATCH 3/6] Allow problems to be cleaned up after compiler crash --- backend/src/main/scala/bloop/Compiler.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index 34f13d22f..b4ead47cf 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -1,6 +1,8 @@ package bloop import java.io.File +import java.io.PrintWriter +import java.io.StringWriter import java.nio.file.Files import java.nio.file.Path import java.util.Optional @@ -742,9 +744,13 @@ object Compiler { Result.Failed(failedProblems, None, elapsed, backgroundTasks, None) case t: Throwable => t.printStackTrace() + val sw = new StringWriter() + t.printStackTrace(new PrintWriter(sw)) + logger.info(sw.toString()) val backgroundTasks = 
toBackgroundTasks(backgroundTasksForFailedCompilation.toList) - Result.Failed(Nil, Some(t), elapsed, backgroundTasks, None) + val failedProblems = findFailedProblems(reporter, None) + Result.Failed(failedProblems, Some(t), elapsed, backgroundTasks, None) } } } From 2d576eae32706f20de3c501401f723f2d097112e Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Mon, 23 Sep 2024 13:28:30 +0200 Subject: [PATCH 4/6] Run OrganiseImports --- backend/src/main/scala/bloop/Compiler.scala | 1 + .../src/main/scala/bloop/util/BestEffortUtils.scala | 7 ++++--- .../main/scala/bloop/engine/tasks/CompileTask.scala | 12 ++++++------ .../engine/tasks/compilation/CompileGraph.scala | 2 +- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index b4ead47cf..af63951e5 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -5,6 +5,7 @@ import java.io.PrintWriter import java.io.StringWriter import java.nio.file.Files import java.nio.file.Path +import java.nio.file.Paths import java.util.Optional import java.util.concurrent.Executor diff --git a/backend/src/main/scala/bloop/util/BestEffortUtils.scala b/backend/src/main/scala/bloop/util/BestEffortUtils.scala index f29cdb6df..f9dd5cbc3 100644 --- a/backend/src/main/scala/bloop/util/BestEffortUtils.scala +++ b/backend/src/main/scala/bloop/util/BestEffortUtils.scala @@ -1,11 +1,12 @@ package bloop.util -import java.security.MessageDigest - import java.math.BigInteger import java.nio.file.Files -import scala.collection.JavaConverters._ import java.nio.file.Path +import java.security.MessageDigest + +import scala.collection.JavaConverters._ + import bloop.io.AbsolutePath object BestEffortUtils { diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index 1f8042d85..e77375123 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -1,5 +1,7 @@ package bloop.engine.tasks +import java.util.Optional + import scala.collection.mutable import scala.concurrent.Promise @@ -8,8 +10,8 @@ import bloop.CompileInputs import bloop.CompileOutPaths import bloop.CompileProducts import bloop.Compiler -import bloop.Compiler.Result.Success import bloop.Compiler.Result.Failed +import bloop.Compiler.Result.Success import bloop.cli.ExitStatus import bloop.data.Project import bloop.data.WorkspaceSettings @@ -20,10 +22,10 @@ import bloop.engine.State import bloop.engine.caches.LastSuccessfulResult import bloop.engine.tasks.compilation.FinalCompileResult import bloop.engine.tasks.compilation._ +import bloop.io.AbsolutePath import bloop.io.ParallelOps import bloop.io.ParallelOps.CopyMode import bloop.io.{Paths => BloopPaths} -import bloop.io.AbsolutePath import bloop.logging.DebugFilter import bloop.logging.Logger import bloop.logging.LoggerAction @@ -39,11 +41,9 @@ import bloop.util.BestEffortUtils.BestEffortProducts import monix.execution.CancelableFuture import monix.reactive.MulticastStrategy import monix.reactive.Observable -import sbt.internal.inc.BloopComponentCompiler -import xsbti.compile.PreviousResult -import java.util.Optional -import xsbti.compile.MiniSetup import xsbti.compile.CompileAnalysis +import xsbti.compile.MiniSetup +import xsbti.compile.PreviousResult object CompileTask { private implicit val logContext: DebugFilter = DebugFilter.Compilation diff --git 
a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala index 6268ab830..aeaf317db 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala @@ -27,9 +27,9 @@ import bloop.logging.DebugFilter import bloop.logging.LoggerAction import bloop.reporter.ReporterAction import bloop.task.Task +import bloop.util.BestEffortUtils.BestEffortProducts import bloop.util.JavaCompat.EnrichOptional import bloop.util.SystemProperties -import bloop.util.BestEffortUtils.BestEffortProducts import xsbti.compile.PreviousResult From 449ae2028cb1ff19e2263b44939d6f3e0bce0bde Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Thu, 10 Oct 2024 17:49:49 +0200 Subject: [PATCH 5/6] Redesign best effort compilation to not have to remove successful analyses Also better cleanup directories --- .../scala/bloop/BloopClassFileManager.scala | 42 ++---- backend/src/main/scala/bloop/Compiler.scala | 136 ++++++++++-------- .../inc/bloop/BloopZincCompiler.scala | 7 +- docs/contributing-guide.md | 25 ++++ .../bloop/engine/tasks/CompileTask.scala | 100 +++++-------- 5 files changed, 158 insertions(+), 152 deletions(-) diff --git a/backend/src/main/scala/bloop/BloopClassFileManager.scala b/backend/src/main/scala/bloop/BloopClassFileManager.scala index 103bbaa37..53c8dcf9b 100644 --- a/backend/src/main/scala/bloop/BloopClassFileManager.scala +++ b/backend/src/main/scala/bloop/BloopClassFileManager.scala @@ -219,36 +219,20 @@ final class BloopClassFileManager( clientTracer.traceTaskVerbose("copy new products to external classes dir") { _ => val config = ParallelOps.CopyConfiguration(5, CopyMode.ReplaceExisting, Set.empty, Set.empty) - val clientExternalBestEffortDir = - clientExternalClassesDir.underlying.resolve("META-INF/best-effort") - - // Deletes all previous best-effort artifacts to get rid of all of the outdated ones. - // Since best effort compilation is not affected by incremental compilation, - // all relevant files are always produced by the compiler. Because of this, - // we can always delete all previous files and copy newly created ones - // without losing anything in the process. 
- val deleteClientExternalBestEffortDir = - Task { - if (Files.exists(clientExternalBestEffortDir)) { - BloopPaths.delete(AbsolutePath(clientExternalBestEffortDir)) - } + + ParallelOps + .copyDirectories(config)( + newClassesDir, + clientExternalClassesDir.underlying, + inputs.ioScheduler, + enableCancellation = false, + inputs.logger + ) + .map { walked => + readOnlyCopyDenylist.++=(walked.target) () - }.memoize - - deleteClientExternalBestEffortDir *> - ParallelOps - .copyDirectories(config)( - newClassesDir, - clientExternalClassesDir.underlying, - inputs.ioScheduler, - enableCancellation = false, - inputs.logger - ) - .map { walked => - readOnlyCopyDenylist.++=(walked.target) - () - } - .flatMap(_ => deleteAfterCompilation) + } + .flatMap(_ => deleteAfterCompilation) } } ) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index af63951e5..e426e412d 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -5,7 +5,6 @@ import java.io.PrintWriter import java.io.StringWriter import java.nio.file.Files import java.nio.file.Path -import java.nio.file.Paths import java.util.Optional import java.util.concurrent.Executor @@ -49,7 +48,6 @@ import xsbti.T2 import xsbti.VirtualFileRef import xsbti.compile._ - case class CompileInputs( scalaInstance: ScalaInstance, compilerCache: CompilerCache, @@ -295,11 +293,6 @@ object Compiler { ) } - val previousWasBestEffort = compileInputs.previousCompilerResult match { - case Failed(_, _, _, _, Some(BestEffortProducts(_, _, _))) => true - case _ => !firstCompilation - } - val isFatalWarningsEnabled: Boolean = compileInputs.scalacOptions.exists(_ == "-Xfatal-warnings") def getInputs(compilers: Compilers): Inputs = { @@ -385,6 +378,9 @@ object Compiler { reporter.reportStartCompilation(previousProblems, wasPreviousSuccessful) val fileManager = newFileManager + val shouldAttemptRestartingCompilationForBestEffort = + firstCompilation && !isBestEffortDep && previousAnalysis.isDefined + // Manually skip redundant best-effort compilations. This is necessary because compiler // phases supplying the data needed to skip compilations in zinc remain unimplemented for now. 
val noopBestEffortResult = compileInputs.previousCompilerResult match { @@ -470,7 +466,8 @@ object Compiler { fileManager, cancelPromise, tracer, - classpathOptions + classpathOptions, + !(isBestEffortMode && isBestEffortDep) ) .materialize .doOnCancel(Task(cancel())) @@ -483,11 +480,9 @@ object Compiler { () => elapsed, reporter, backgroundTasksWhenNewSuccessfulAnalysis, - allInvalidatedClassFilesForProject, - allInvalidatedExtraCompileProducts, previousSuccessfulProblems, errorCause = None, - previousWasBestEffort + shouldAttemptRestartingCompilationForBestEffort ) case Success(result) => // Report end of compilation only after we have reported all warnings from previous runs @@ -553,16 +548,25 @@ object Compiler { val clientClassesDir = clientClassesObserver.classesDir clientLogger.debug(s"Triggering background tasks for $clientClassesDir") val updateClientState = - updateExternalClassesDirWithReadOnly( - clientClassesDir, - clientTracer, - clientLogger, - compileInputs, - readOnlyClassesDir, - readOnlyCopyDenylist, - allInvalidatedClassFilesForProject, - allInvalidatedExtraCompileProducts - ) + Task + .gatherUnordered( + List( + deleteClientExternalBestEffortDirTask(clientClassesDir), + deleteBestEffortDir() + ) + ) + .flatMap { _ => + updateExternalClassesDirWithReadOnly( + clientClassesDir, + clientTracer, + clientLogger, + compileInputs, + readOnlyClassesDir, + readOnlyCopyDenylist, + allInvalidatedClassFilesForProject, + allInvalidatedExtraCompileProducts + ) + } val writeAnalysisIfMissing = { if (compileOut.analysisOut.exists) Task.unit @@ -585,11 +589,9 @@ object Compiler { ) } .flatMap(clientClassesObserver.nextAnalysis) - - deleteBestEffortDir() *> Task + Task .gatherUnordered( List( - deleteBestEffortDir, deleteNewClassesDir, updateClientState, writeAnalysisIfMissing, @@ -598,7 +600,8 @@ object Compiler { ) .flatMap(_ => publishClientAnalysis) .onErrorHandleWith(err => { - clientLogger.debug("Caught error in background tasks"); clientLogger.trace(err); + clientLogger.debug("Caught error in background tasks"); + clientLogger.trace(err); Task.raiseError(err) }) .doOnFinish(_ => Task(clientReporter.reportEndCompilation())) @@ -642,10 +645,19 @@ object Compiler { ): Task[Unit] = { val clientClassesDir = clientClassesObserver.classesDir val successBackgroundTasks = - deleteBestEffortDir() *> Task.gatherUnordered( - backgroundTasksWhenNewSuccessfulAnalysis - .map(f => f(clientClassesDir, clientReporter, clientTracer)) - ) + Task + .gatherUnordered( + List( + deleteBestEffortDir(), + deleteClientExternalBestEffortDirTask(clientClassesDir) + ) + ) + .flatMap { _ => + Task.gatherUnordered( + backgroundTasksWhenNewSuccessfulAnalysis + .map(f => f(clientClassesDir, clientReporter, clientTracer)) + ) + } val persistTask = persistAnalysis(analysisForFutureCompilationRuns, compileOut.analysisOut) val initialTasks = List(persistTask, successBackgroundTasks) @@ -724,11 +736,9 @@ object Compiler { () => elapsed, reporter, backgroundTasksWhenNewSuccessfulAnalysis, - allInvalidatedClassFilesForProject, - allInvalidatedExtraCompileProducts, previousSuccessfulProblems, errorCause = Some(cause), - previousWasBestEffort + shouldAttemptRestartingCompilationForBestEffort ) case Failure(_: xsbti.CompileCancelled) => handleCancellation @@ -747,7 +757,7 @@ object Compiler { t.printStackTrace() val sw = new StringWriter() t.printStackTrace(new PrintWriter(sw)) - logger.info(sw.toString()) + logger.error(sw.toString()) val backgroundTasks = toBackgroundTasks(backgroundTasksForFailedCompilation.toList) val 
failedProblems = findFailedProblems(reporter, None) @@ -954,14 +964,11 @@ object Compiler { elapsed: () => Long, reporter: ZincReporter, backgroundTasksWhenNewSuccessfulAnalysis: mutable.ListBuffer[CompileBackgroundTasks.Sig], - allInvalidatedClassFilesForProject: mutable.HashSet[File], - allInvalidatedExtraCompileProducts: mutable.HashSet[File], previousSuccessfulProblems: List[ProblemPerPhase], errorCause: Option[xsbti.CompileFailed], - previousWasBestEffort: Boolean + shouldAttemptRestartingCompilation: Boolean ): Result = { val uniqueInputs = compileInputs.uniqueInputs - val readOnlyClassesDir = compileOut.internalReadOnlyClassesDir.underlying val newClassesDir = compileOut.internalNewClassesDir.underlying reporter.processEndCompilation( @@ -978,7 +985,7 @@ object Compiler { ) val products = CompileProducts( - readOnlyClassesDir, + newClassesDir, // let's not use readonly dir newClassesDir, noOpPreviousResult, noOpPreviousResult, @@ -995,9 +1002,13 @@ object Compiler { ): Task[Unit] = { val clientClassesDir = clientClassesObserver.classesDir val successBackgroundTasks = - backgroundTasksWhenNewSuccessfulAnalysis - .map(f => f(clientClassesDir, clientReporter, clientTracer)) - val allClientSyncTasks = Task.gatherUnordered(successBackgroundTasks.toList).flatMap { _ => + deleteClientExternalBestEffortDirTask(clientClassesDir).flatMap { _ => + Task.gatherUnordered( + backgroundTasksWhenNewSuccessfulAnalysis + .map(f => f(clientClassesDir, clientReporter, clientTracer)) + ) + } + val allClientSyncTasks = successBackgroundTasks.flatMap { _ => // Only start this task after the previous IO tasks in the external dir are done Task { // Delete everything outside of betasty and semanticdb @@ -1005,8 +1016,11 @@ object Compiler { BloopClassFileManager.supportedCompileProducts.filter(_ != ".betasty") :+ ".class" Files .walk(clientClassesDir.underlying) - .filter(path => if (Files.exists(path)) Files.isRegularFile(path) else false) - .filter(path => deletedCompileProducts.exists(path.toString.endsWith(_))) + .filter(path => + Files.isRegularFile(path) && deletedCompileProducts.exists( + path.toString.endsWith(_) + ) + ) .forEach(path => if (Files.exists(path)) Files.delete(path)) }.map(_ => ()) } @@ -1015,22 +1029,12 @@ object Compiler { } } - val recompile = - if ( - !previousWasBestEffort && !(compileOut.internalReadOnlyClassesDir.exists && BloopPaths - .list(compileOut.internalReadOnlyClassesDir) - .length == 0) - ) { - if (compileOut.analysisOut.exists) BloopPaths.delete(compileOut.analysisOut) - BloopPaths.delete(compileOut.internalReadOnlyClassesDir) - Files.createDirectories(Paths.get(compileOut.internalReadOnlyClassesDir.toString)) - BloopPaths.delete(compileOut.internalNewClassesDir) - Files.createDirectories(Paths.get(compileOut.internalNewClassesDir.toString)) - true - } else false + if (shouldAttemptRestartingCompilation) { + BloopPaths.delete(compileOut.internalNewClassesDir) + } val newHash = - if (previousWasBestEffort) + if (!shouldAttemptRestartingCompilation) BestEffortUtils.hashResult( products.newClassesDir, compileInputs.sources, @@ -1043,7 +1047,7 @@ object Compiler { None, elapsed(), backgroundTasksExecution, - Some(BestEffortProducts(products, newHash, recompile)) + Some(BestEffortProducts(products, newHash, shouldAttemptRestartingCompilation)) ) } @@ -1211,4 +1215,20 @@ object Compiler { } } } + + // Deletes all previous best-effort artifacts to get rid of all of the outdated ones. 
+ // Since best effort compilation is not affected by incremental compilation, + // all relevant files are always produced by the compiler. Because of this, + // we can always delete all previous files and copy newly created ones + // without losing anything in the process. + def deleteClientExternalBestEffortDirTask(clientClassesDir: AbsolutePath) = { + val clientExternalBestEffortDir = + clientClassesDir.underlying.resolve("META-INF/best-effort") + Task { + if (Files.exists(clientExternalBestEffortDir)) { + BloopPaths.delete(AbsolutePath(clientExternalBestEffortDir)) + } + () + }.memoize + } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala index 775163ebc..16ed24903 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala @@ -58,7 +58,8 @@ object BloopZincCompiler { manager: ClassFileManager, cancelPromise: Promise[Unit], tracer: BraveTracer, - classpathOptions: ClasspathOptions + classpathOptions: ClasspathOptions, + withPreviousResult: Boolean ): Task[CompileResult] = { val config = in.options() val setup = in.setup() @@ -81,8 +82,8 @@ object BloopZincCompiler { scalacOptions, javacOptions, classpathOptions, - in.previousResult.analysis.toOption, - in.previousResult.setup.toOption, + if (withPreviousResult) in.previousResult.analysis.toOption else None, + if (withPreviousResult) in.previousResult.setup.toOption else None, perClasspathEntryLookup, reporter, order, diff --git a/docs/contributing-guide.md b/docs/contributing-guide.md index 807ce423c..64d3349b3 100644 --- a/docs/contributing-guide.md +++ b/docs/contributing-guide.md @@ -242,3 +242,28 @@ indices. - Push the new tag and wait for the release - Announce the release after the release notes are published in the most recent release. + + +# Best effort compilation pipeline + +As of writing this part of the doc this is an experimental set of settings implemented +in the Scala 3 compiler (starting with 3.5.x). They allow the compiler to return artifacts +even when the compilation fails (returning `.betasty` files instead of `.class` and `.tasty`). +It also at this point does not support incremental compilation. This all requires special +handling from the build tool, mostly located in `Compiler.scala`, `CompileTask.scala` +and `CompileGraph.scala`: +- We save best effort artifacts separately, and allow dependent projects to compile using +that, even when the compilation has failed. If the project compiles we discard the best effort +artifacts. +- First, we try compiling partially (only the modified files), expecting regular successful compilation +- If that at some point fails, we discard the immediate results and recompile the whole module +expecting .betasty files. We do not ever move them to a readOnly directory. That readOnly directory +is also not used in dependent compilations. +- We do not try to recompile if we know we are compiling the whole module to begin with (e.g. because we +are depending on .betasty from different project, or because this is the first compilation and we +do not have any previous incremental compilation analysis). 
+- If we try to recompile a module that we previously compiled for .betasty, we once again, try to +recompile it 2 times - once incrementally expecting success (recompiling all files changed since +the last successful compilation, as dictated by the incremental compilation analysis) and then +recompile all - this works out to be faster than discarding the last successful result and jumping +between full successful recompilation and full best effort recompilation. diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index e77375123..a5b72d5ee 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -176,47 +176,6 @@ object CompileTask { // Block on the task associated with this result that sets up the read-only classes dir waitOnReadClassesDir.flatMap { _ => - def getAllSourceInputs(project: Project): List[AbsolutePath] = { - import java.nio.file.Files - import scala.collection.JavaConverters._ - - val uniqueSourceDirs = project.sources - - val sourceExts = Seq(".scala", ".java") - val unmanagedSources: mutable.Set[AbsolutePath] = mutable.Set() - - uniqueSourceDirs.map(_.underlying).foreach { file => - if (!Files.exists(file)) () - else if ( - Files.isRegularFile(file) && sourceExts.exists(ext => file.toString.endsWith(ext)) - ) { - unmanagedSources.add(AbsolutePath(file)) - } else if (Files.isDirectory(file)) { - Files.walk(file).iterator().asScala.foreach { file => - if ( - Files.isRegularFile(file) && sourceExts - .exists(ext => file.toString.endsWith(ext)) - ) { - unmanagedSources.add(AbsolutePath(file)) - } - } - } - } - - project.sourcesGlobs.foreach { glob => - Files.walk(glob.directory.underlying).iterator().asScala.foreach { file => - if ( - Files.isRegularFile(file) && sourceExts - .exists(ext => file.toString.endsWith(ext)) && glob.matches(file) - ) { - unmanagedSources.add(AbsolutePath(file)) - } - } - } - - unmanagedSources.toList - } - // Only when the task is finished, we kickstart the compilation def compile(inputs: CompileInputs) = { val firstResult = Compiler.compile(inputs, isBestEffort, isBestEffortDep, true) @@ -230,11 +189,10 @@ object CompileTask { ) if recompile => // we restart the compilation, starting from scratch (without any previous artifacts) inputs.reporter.reset() - val foundSrcs = getAllSourceInputs(project) val emptyResult = PreviousResult.of(Optional.empty[CompileAnalysis], Optional.empty[MiniSetup]) val newInputs = inputs.copy( - sources = foundSrcs.toArray, + sources = inputs.sources, previousCompilerResult = result, previousResult = emptyResult ) @@ -535,29 +493,47 @@ object CompileTask { logger.debug(s"Scheduling to delete ${previousClassesDir} superseded by $newClassesDir") Some(previousClassesDir) } - case Failed(_, _, _, _, Some(BestEffortProducts(products, _, _))) => - val newClassesDir = products.newClassesDir - previousResult match { - case Some(Failed(_, _, _, _, Some(BestEffortProducts(previousProducts, _, _)))) => - val previousClassesDir = previousProducts.newClassesDir - if (previousClassesDir != newClassesDir) { - logger.debug( - s"Scheduling to delete ${previousClassesDir} superseded by $newClassesDir" - ) - Some(AbsolutePath(previousClassesDir)) - } else None + case _ => None + } + + val previousBestEffortToDelete = previousResult match { + case Some(Failed(_, _, _, _, Some(BestEffortProducts(previousProducts, _, _)))) => + val newClassesDirOpt = compilerResult match { 
+ case Success(_, _, products, _, _, _, _) => Some(products.newClassesDir) + case Failed(_, _, _, _, Some(BestEffortProducts(products, _, _))) => + Some(products.newClassesDir) case _ => None } + val previousClassesDir = previousProducts.newClassesDir + + newClassesDirOpt.flatMap { newClassesDir => + if (previousClassesDir != newClassesDir) { + logger.debug( + s"Scheduling to delete ${previousClassesDir} superseded by $newClassesDir" + ) + Some(AbsolutePath(previousClassesDir)) + } else None + } case _ => None } - previousReadOnlyToDelete match { - case None => Task.unit - case Some(classesDir) => - Task.eval { - logger.debug(s"Deleting contents of orphan dir $classesDir") - BloopPaths.delete(classesDir) - }.asyncBoundary - } + def deleteOrphanDir(orphanDir: Option[AbsolutePath]): Task[Unit] = + orphanDir match { + case None => Task.unit + case Some(classesDir) => + Task.eval { + logger.debug(s"Deleting contents of orphan dir $classesDir") + BloopPaths.delete(classesDir) + }.asyncBoundary + } + + Task + .gatherUnordered( + List( + deleteOrphanDir(previousReadOnlyToDelete), + deleteOrphanDir(previousBestEffortToDelete) + ) + ) + .map(_ => ()) } } From 8ec5c8db4bf96ab7d2ea4348aceb2dade139942b Mon Sep 17 00:00:00 2001 From: Jan Chyb Date: Wed, 23 Oct 2024 14:26:58 +0200 Subject: [PATCH 6/6] Address review comments --- backend/src/main/scala/bloop/Compiler.scala | 9 +++------ .../main/scala/bloop/engine/tasks/CompileTask.scala | 10 ++++++++-- .../bloop/engine/tasks/compilation/CompileGraph.scala | 11 ++++++----- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index e426e412d..9c190e6d9 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -1016,12 +1016,9 @@ object Compiler { BloopClassFileManager.supportedCompileProducts.filter(_ != ".betasty") :+ ".class" Files .walk(clientClassesDir.underlying) - .filter(path => - Files.isRegularFile(path) && deletedCompileProducts.exists( - path.toString.endsWith(_) - ) - ) - .forEach(path => if (Files.exists(path)) Files.delete(path)) + .filter(path => Files.isRegularFile(path)) + .filter(path => deletedCompileProducts.exists(path.toString.endsWith(_))) + .forEach(Files.delete(_)) }.map(_ => ()) } diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index a5b72d5ee..86a4a91c2 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -178,7 +178,8 @@ object CompileTask { waitOnReadClassesDir.flatMap { _ => // Only when the task is finished, we kickstart the compilation def compile(inputs: CompileInputs) = { - val firstResult = Compiler.compile(inputs, isBestEffort, isBestEffortDep, true) + val firstResult = + Compiler.compile(inputs, isBestEffort, isBestEffortDep, firstCompilation = true) firstResult.flatMap { case result @ Compiler.Result.Failed( _, @@ -196,7 +197,12 @@ object CompileTask { previousCompilerResult = result, previousResult = emptyResult ) - Compiler.compile(newInputs, isBestEffort, isBestEffortDep, false) + Compiler.compile( + newInputs, + isBestEffort, + isBestEffortDep, + firstCompilation = false + ) case result => Task(result) } } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala index 
aeaf317db..1aab0af1e 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala @@ -389,7 +389,7 @@ object CompileGraph { PartialFailure(bundle.project, FailedOrCancelledPromise, Task.now(results)) } - def loop(dag: Dag[Project], isBestEffortDep: Boolean): CompileTraversal = { + def loop(dag: Dag[Project]): CompileTraversal = { tasks.get(dag) match { case Some(task) => task case None => @@ -397,6 +397,7 @@ object CompileGraph { case Leaf(project) => val bundleInputs = BundleInputs(project, dag, Map.empty) setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => + val isBestEffortDep = false compile(Inputs(bundle, Map.empty), bestEffortAllowed && project.isBestEffort, isBestEffortDep).map { results => results.fromCompiler match { case Compiler.Result.Ok(_) => Leaf(partialSuccess(bundle, results)) @@ -406,13 +407,13 @@ object CompileGraph { } case Aggregate(dags) => - val downstream = dags.map(loop(_, isBestEffortDep = false)) + val downstream = dags.map(loop(_)) Task.gatherUnordered(downstream).flatMap { dagResults => Task.now(Parent(PartialEmpty, dagResults)) } case Parent(project, dependencies) => - val downstream = dependencies.map(loop(_, isBestEffortDep = false)) + val downstream = dependencies.map(loop(_)) Task.gatherUnordered(downstream).flatMap { dagResults => val depsSupportBestEffort = dependencies.map(Dag.dfs(_, mode = Dag.PreOrder)).flatten.forall(_.isBestEffort) @@ -433,7 +434,7 @@ object CompileGraph { case _ => false } val continue = bestEffortAllowed && depsSupportBestEffort && successfulBestEffort || failed.isEmpty - val dependsOnBestEffort = failed.nonEmpty && bestEffortAllowed && depsSupportBestEffort || isBestEffortDep + val dependsOnBestEffort = failed.nonEmpty && bestEffortAllowed && depsSupportBestEffort if (!continue) { // Register the name of the projects we're blocked on (intransitively) @@ -479,7 +480,7 @@ object CompileGraph { } } - loop(dag, isBestEffortDep = false) + loop(dag) } private def errorToString(err: Throwable): String = {
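
Taken together, the series leaves CompileTask.scala with a two-attempt flow: a normal incremental compilation first, and a single full restart that produces .betasty files when the first attempt fails with recompile == true. A condensed, self-contained sketch of that flow follows; the wrapper object, the method name compileWithRestart, and passing isBestEffort/isBestEffortDep as parameters are only there to make the snippet stand alone, while the calls mirror the patched sources.

import java.util.Optional

import bloop.CompileInputs
import bloop.Compiler
import bloop.task.Task
import bloop.util.BestEffortUtils.BestEffortProducts

import xsbti.compile.CompileAnalysis
import xsbti.compile.MiniSetup
import xsbti.compile.PreviousResult

object BestEffortRetrySketch {
  // First attempt: incremental compilation expecting success. If it fails and the
  // result says a restart is needed, drop the previous analysis and recompile the
  // whole module, this time keeping the .betasty artifacts for downstream projects.
  def compileWithRestart(
      inputs: CompileInputs,
      isBestEffort: Boolean,
      isBestEffortDep: Boolean
  ): Task[Compiler.Result] =
    Compiler.compile(inputs, isBestEffort, isBestEffortDep, firstCompilation = true).flatMap {
      case result @ Compiler.Result.Failed(_, _, _, _, Some(BestEffortProducts(_, _, recompile)))
          if recompile =>
        inputs.reporter.reset()
        val emptyResult =
          PreviousResult.of(Optional.empty[CompileAnalysis], Optional.empty[MiniSetup])
        val restartedInputs =
          inputs.copy(previousCompilerResult = result, previousResult = emptyResult)
        Compiler.compile(restartedInputs, isBestEffort, isBestEffortDep, firstCompilation = false)
      case result => Task(result)
    }
}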