From 7194388c7abca1fe279710852f68711ff32914b1 Mon Sep 17 00:00:00 2001 From: Tomasz Godzik Date: Mon, 8 Feb 2021 11:04:54 +0100 Subject: [PATCH] Switch to the original fork of zinc and remove pipelining. In order to make upgrading versions easier we decided to switch back to sbt/zinc instead of a custom fork. The original reason for using the fork was to enable build pipelining, but that itself has been implemented in the original fork. Unfortunately, to make the migration easier we needed to remove build pipelining for now to later add it with the default mechanism. Related to https://github.com/scalacenter/bloop/issues/1383 The benchmarks are being run to make sure we are not regressing in performance. --- .gitmodules | 4 - .../scala/bloop/BloopClassFileManager.scala | 68 ++++- .../src/main/scala/bloop/CompileMode.scala | 27 -- .../main/scala/bloop/CompileProducts.scala | 3 +- backend/src/main/scala/bloop/Compiler.scala | 36 +-- .../src/main/scala/bloop/CompilerCache.scala | 73 ++--- .../src/main/scala/bloop/CompilerOracle.scala | 70 ----- .../scala/bloop/PartialCompileProducts.scala | 3 +- .../src/main/scala/bloop/ScalaInstance.scala | 16 +- .../scala/bloop/scalasig/PickleMarker.scala | 60 ---- .../main/scala/bloop/scalasig/ScalaSig.scala | 71 ----- .../scala/bloop/scalasig/ScalaSigWriter.scala | 144 --------- .../internal/inc/BloopComponentCompiler.scala | 16 +- .../inc/BloopZincLibraryManagement.scala | 8 +- .../inc/bloop/BloopZincCompiler.scala | 13 +- .../internal/BloopAnalysisCallback.scala | 22 -- .../internal/BloopHighLevelCompiler.scala | 64 ++-- .../inc/bloop/internal/BloopIncremental.scala | 7 +- .../inc/bloop/internal/BloopNameHashing.scala | 1 - .../internal/ConcurrentAnalysisCallback.scala | 22 -- .../inc/bloop/internal/StopPipelining.scala | 15 - .../test/scala/bloop/CompilerCacheSpec.scala | 1 - benchmark-bridge | 2 +- bin/run-benchmarks.sh | 16 +- .../scala/bloop/bsp/BloopBspServices.scala | 2 +- .../main/scala/bloop/data/ClientInfo.scala 
| 4 +- .../main/scala/bloop/engine/Interpreter.scala | 1 - .../engine/caches/LastSuccessfulResult.scala | 1 - .../bloop/engine/caches/ResultsCache.scala | 4 +- .../bloop/engine/tasks/CompileTask.scala | 66 +---- .../main/scala/bloop/engine/tasks/Tasks.scala | 2 +- .../tasks/compilation/CompileBundle.scala | 4 +- .../compilation/CompileDefinitions.scala | 9 - .../tasks/compilation/CompileGraph.scala | 275 +----------------- .../tasks/compilation/CompileResult.scala | 37 +-- .../tasks/compilation/PipeliningOracle.scala | 150 ---------- .../tasks/compilation/SimpleOracle.scala | 20 -- .../main/scala/bloop/io/SourceHasher.scala | 1 - .../scala/bloop/BuildPipeliningSpec.scala | 159 ---------- project/Dependencies.scala | 4 +- zinc | 1 - 41 files changed, 184 insertions(+), 1318 deletions(-) delete mode 100644 backend/src/main/scala/bloop/CompileMode.scala delete mode 100644 backend/src/main/scala/bloop/CompilerOracle.scala delete mode 100644 backend/src/main/scala/bloop/scalasig/PickleMarker.scala delete mode 100644 backend/src/main/scala/bloop/scalasig/ScalaSig.scala delete mode 100644 backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala delete mode 100644 backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala delete mode 100644 frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala delete mode 100644 frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala delete mode 100644 frontend/src/test/scala/bloop/BuildPipeliningSpec.scala delete mode 160000 zinc diff --git a/.gitmodules b/.gitmodules index d885e28346..2d8bece971 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,3 @@ -[submodule "zinc"] - path = zinc - url = https://github.com/scalacenter/zinc.git - branch = loop [submodule "nailgun"] path = nailgun url = https://github.com/scalacenter/nailgun.git diff --git a/backend/src/main/scala/bloop/BloopClassFileManager.scala b/backend/src/main/scala/bloop/BloopClassFileManager.scala 
index cf8afede1b..365c19c92b 100644 --- a/backend/src/main/scala/bloop/BloopClassFileManager.scala +++ b/backend/src/main/scala/bloop/BloopClassFileManager.scala @@ -1,28 +1,29 @@ package bloop -import bloop.io.{Paths => BloopPaths} import bloop.io.AbsolutePath -import bloop.tracing.BraveTracer import bloop.io.ParallelOps import bloop.io.ParallelOps.CopyMode +import bloop.io.{Paths => BloopPaths} +import bloop.reporter.Reporter +import bloop.tracing.BraveTracer +import monix.eval.Task +import xsbti.compile.ClassFileManager +import xsbti.compile.PreviousResult import java.io.File +import java.io.IOException +import java.nio.file.CopyOption +import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths - +import java.nio.file.StandardCopyOption import scala.collection.mutable - -import xsbti.compile.ClassFileManager -import monix.eval.Task -import bloop.reporter.Reporter -import xsbti.compile.PreviousResult -import java.nio.file.Files -import java.io.IOException -import scala.util.Try import scala.util.Failure import scala.util.Success +import scala.util.Try final class BloopClassFileManager( + backupDir0: Path, inputs: CompileInputs, outPaths: CompileOutPaths, allGeneratedRelativeClassFilePaths: mutable.HashMap[String, File], @@ -38,9 +39,16 @@ final class BloopClassFileManager( private[this] val newClassesDirPath = newClassesDir.toString private[this] val dependentClassFilesLinks = new mutable.HashSet[Path]() private[this] val weakClassFileInvalidations = new mutable.HashSet[Path]() + private[this] val generatedFiles = new mutable.HashSet[File] // Supported compile products by the class file manager private[this] val supportedCompileProducts = List(".sjsir", ".nir", ".tasty") + // Files backed up during compilation + private[this] val movedFiles = new mutable.HashMap[File, File] + + private val backupDir = backupDir0.normalize + backupDir.toFile.delete() + Files.createDirectories(backupDir) /** * Returns the set of all invalidated class files. 
@@ -129,7 +137,6 @@ final class BloopClassFileManager( } } } - allInvalidatedClassFilesForProject.++=(classes) val invalidatedExtraCompileProducts = classes.flatMap { classFile => @@ -142,12 +149,26 @@ final class BloopClassFileManager( } } + // Idea taken from the default TransactionalClassFileManager in zinc + // https://github.com/sbt/zinc/blob/c18637c1b30f8ab7d1f702bb98301689ec75854b/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala#L183 + val toBeBackedUp = (classes ++ invalidatedExtraCompileProducts).filter(c => + !movedFiles.contains(c) && !generatedFiles(c) + ) + for (c <- toBeBackedUp) + if (c.exists) + movedFiles.put(c, move(c)).foreach(move) + + classes.foreach { f => + if (f.exists()) f.delete() + } + allInvalidatedExtraCompileProducts.++=(invalidatedExtraCompileProducts) } def generated(generatedClassFiles: Array[File]): Unit = { memoizedInvalidatedClassFiles = null generatedClassFiles.foreach { generatedClassFile => + generatedFiles += generatedClassFile val newClassFile = generatedClassFile.getAbsolutePath val relativeClassFilePath = newClassFile.replace(newClassesDirPath, "") allGeneratedRelativeClassFilePaths.put(relativeClassFilePath, generatedClassFile) @@ -167,6 +188,7 @@ final class BloopClassFileManager( allInvalidatedExtraCompileProducts.-=(productAssociatedToClassFile) } } + } def complete(success: Boolean): Unit = { @@ -200,6 +222,22 @@ final class BloopClassFileManager( } ) } else { + /* Restore all files from backuped last successful compilation to make sure + * that they are still available. 
+ */ + for { + (orig, tmp) <- movedFiles + } { + if (tmp.exists) { + if (!orig.getParentFile.exists) { + Files.createDirectory(orig.getParentFile.toPath()) + } + Files.move(tmp.toPath(), orig.toPath()) + } + } + backupDir.toFile().delete() + () + // Delete all compilation products generated in the new classes directory val deleteNewDir = Task { BloopPaths.delete(AbsolutePath(newClassesDir)); () }.memoize backgroundTasksForFailedCompilation.+=( @@ -245,6 +283,12 @@ final class BloopClassFileManager( ) } } + + private def move(c: File): File = { + val target = Files.createTempFile(backupDir, "bloop", ".class").toFile + Files.move(c.toPath(), target.toPath(), StandardCopyOption.REPLACE_EXISTING) + target + } } object BloopClassFileManager { diff --git a/backend/src/main/scala/bloop/CompileMode.scala b/backend/src/main/scala/bloop/CompileMode.scala deleted file mode 100644 index fceab7e07f..0000000000 --- a/backend/src/main/scala/bloop/CompileMode.scala +++ /dev/null @@ -1,27 +0,0 @@ -package bloop - -import _root_.monix.eval.Task -import scala.concurrent.Promise -import bloop.io.AbsolutePath -import xsbti.compile.Signature - -/** - * Defines the mode in which compilation should run. 
- */ -sealed trait CompileMode { - def oracle: CompilerOracle -} - -object CompileMode { - case class Sequential( - oracle: CompilerOracle - ) extends CompileMode - - final case class Pipelined( - completeJavaCompilation: Promise[Unit], - finishedCompilation: Promise[Option[CompileProducts]], - fireJavaCompilation: Task[JavaSignal], - oracle: CompilerOracle, - separateJavaAndScala: Boolean - ) extends CompileMode -} diff --git a/backend/src/main/scala/bloop/CompileProducts.scala b/backend/src/main/scala/bloop/CompileProducts.scala index 318394380a..3729a4522c 100644 --- a/backend/src/main/scala/bloop/CompileProducts.scala +++ b/backend/src/main/scala/bloop/CompileProducts.scala @@ -35,6 +35,5 @@ case class CompileProducts( resultForDependentCompilationsInSameRun: PreviousResult, resultForFutureCompilationRuns: PreviousResult, invalidatedCompileProducts: Set[File], - generatedRelativeClassFilePaths: Map[String, File], - definedMacroSymbols: Array[String] + generatedRelativeClassFilePaths: Map[String, File] ) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index d851ee62af..559d2f0159 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -13,8 +13,6 @@ import bloop.tracing.BraveTracer import bloop.logging.{ObservedLogger, Logger} import bloop.reporter.{ProblemPerPhase, ZincReporter} import bloop.util.{AnalysisUtils, UUIDUtil, CacheHashCode} -import bloop.CompileMode.Pipelined -import bloop.CompileMode.Sequential import xsbti.compile._ import xsbti.T2 @@ -23,7 +21,6 @@ import sbt.util.InterfaceUtil import sbt.internal.inc.Analysis import sbt.internal.inc.bloop.BloopZincCompiler import sbt.internal.inc.{FreshCompilerCache, InitialChanges, Locate} -import sbt.internal.inc.bloop.internal.StopPipelining import sbt.internal.inc.{ConcreteAnalysisContents, FileAnalysisStore} import scala.concurrent.Promise @@ -55,7 +52,6 @@ case class CompileInputs( 
previousCompilerResult: Compiler.Result, reporter: ZincReporter, logger: ObservedLogger[Logger], - mode: CompileMode, dependentResults: Map[File, PreviousResult], cancelPromise: Promise[Unit], tracer: BraveTracer, @@ -260,6 +256,7 @@ object Compiler { def newFileManager: ClassFileManager = { new BloopClassFileManager( + Files.createTempDirectory("bloop"), compileInputs, compileOut, allGeneratedRelativeClassFilePaths, @@ -300,7 +297,6 @@ object Compiler { .withClasspath(classpath) .withScalacOptions(optionsWithoutFatalWarnings) .withJavacOptions(inputs.javacOptions) - .withClasspathOptions(inputs.classpathOptions) .withOrder(inputs.compileOrder) } @@ -344,19 +340,12 @@ object Compiler { import ch.epfl.scala.bsp import scala.util.{Success, Failure} - val mode = compileInputs.mode val reporter = compileInputs.reporter def cancel(): Unit = { // Complete all pending promises when compilation is cancelled logger.debug(s"Cancelling compilation from ${readOnlyClassesDirPath} to ${newClassesDirPath}") compileInputs.cancelPromise.trySuccess(()) - mode match { - case _: Sequential => () - case Pipelined(completeJava, finishedCompilation, _, _, _) => - completeJava.trySuccess(()) - finishedCompilation.tryFailure(CompileExceptions.FailedOrCancelledPromise) - } // Always report the compilation of a project no matter if it's completed reporter.reportCancelledCompilation() @@ -379,7 +368,16 @@ object Compiler { val uniqueInputs = compileInputs.uniqueInputs reporter.reportStartCompilation(previousProblems) BloopZincCompiler - .compile(inputs, mode, reporter, logger, uniqueInputs, newFileManager, cancelPromise, tracer) + .compile( + inputs, + reporter, + logger, + uniqueInputs, + newFileManager, + cancelPromise, + tracer, + classpathOptions + ) .materialize .doOnCancel(Task(cancel())) .map { @@ -417,9 +415,9 @@ object Compiler { val invalidatedExtraProducts = allInvalidatedExtraCompileProducts.iterator.map(_.toPath).toSet val invalidatedInThisProject = invalidatedClassFiles ++ 
invalidatedExtraProducts - val blacklist = invalidatedInThisProject ++ readOnlyCopyBlacklist.iterator + val denyList = invalidatedInThisProject ++ readOnlyCopyBlacklist.iterator val config = - ParallelOps.CopyConfiguration(5, CopyMode.ReplaceIfMetadataMismatch, blacklist) + ParallelOps.CopyConfiguration(5, CopyMode.ReplaceIfMetadataMismatch, denyList) val lastCopy = ParallelOps.copyDirectories(config)( readOnlyClassesDir, clientClassesDir.underlying, @@ -444,7 +442,6 @@ object Compiler { } val isNoOp = previousAnalysis.contains(analysis) - val definedMacroSymbols = mode.oracle.collectDefinedMacroSymbols if (isNoOp) { // If no-op, return previous result with updated classpath hashes val noOpPreviousResult = { @@ -460,8 +457,7 @@ object Compiler { noOpPreviousResult, noOpPreviousResult, Set(), - Map.empty, - definedMacroSymbols + Map.empty ) val backgroundTasks = new CompileBackgroundTasks { @@ -578,8 +574,7 @@ object Compiler { resultForDependentCompilationsInSameRun, resultForFutureCompilationRuns, allInvalidated.toSet, - allGeneratedProducts, - definedMacroSymbols + allGeneratedProducts ) Result.Success( @@ -599,7 +594,6 @@ object Compiler { reporter.reportEndCompilation() cause match { - case f: StopPipelining => Result.Blocked(f.failedProjectNames) case f: xsbti.CompileFailed => // We cannot guarantee reporter.problems == f.problems, so we aggregate them together val reportedProblems = reporter.allProblemsPerPhase.toList diff --git a/backend/src/main/scala/bloop/CompilerCache.scala b/backend/src/main/scala/bloop/CompilerCache.scala index 425aade776..d7bbae4277 100644 --- a/backend/src/main/scala/bloop/CompilerCache.scala +++ b/backend/src/main/scala/bloop/CompilerCache.scala @@ -1,45 +1,43 @@ package bloop -import java.io.File -import java.lang.Iterable -import java.io.PrintWriter -import java.util.concurrent.ConcurrentHashMap - -import javax.tools.JavaFileManager.Location -import javax.tools.JavaFileObject.Kind -import javax.tools.{ - FileObject, - 
ForwardingJavaFileManager, - JavaFileManager, - JavaFileObject, - JavaCompiler => JavaxCompiler -} -import bloop.io.{AbsolutePath, Paths} -import bloop.util.JavaRuntime +import bloop.io.AbsolutePath +import bloop.io.Paths import bloop.logging.Logger +import bloop.util.JavaRuntime +import sbt.internal.inc.AnalyzingCompiler +import sbt.internal.inc.BloopComponentCompiler +import sbt.internal.inc.BloopZincLibraryManagement +import sbt.internal.inc.ZincUtil +import sbt.internal.inc.bloop.ZincInternals +import sbt.internal.inc.javac.DiagnosticsReporter +import sbt.internal.inc.javac.JavaTools +import sbt.internal.inc.javac.Javadoc +import sbt.internal.inc.javac.WriteReportingJavaFileObject +import sbt.internal.util.LoggerWriter import sbt.librarymanagement.Resolver import xsbti.ComponentProvider -import xsbti.compile.Compilers -import xsbti.compile.{JavaCompiler, JavaTool => XJavaTool} import xsbti.compile.ClassFileManager -import xsbti.{Logger => XLogger, Reporter => XReporter} -import sbt.internal.inc.bloop.ZincInternals -import sbt.internal.inc.{ - AnalyzingCompiler, - ZincUtil, - BloopZincLibraryManagement, - BloopComponentCompiler -} -import sbt.internal.inc.javac.{ - DiagnosticsReporter, - JavaTools, - Javadoc, - WriteReportingJavaFileObject -} -import sbt.internal.util.LoggerWriter +import xsbti.compile.Compilers +import xsbti.compile.JavaCompiler +import xsbti.compile.ScalaCompiler +import xsbti.compile.{JavaTool => XJavaTool} +import xsbti.{Logger => XLogger} +import xsbti.{Reporter => XReporter} + +import java.io.File import java.io.IOException +import java.io.PrintWriter +import java.lang.Iterable +import java.util.concurrent.ConcurrentHashMap +import javax.tools.FileObject +import javax.tools.ForwardingJavaFileManager +import javax.tools.JavaFileManager +import javax.tools.JavaFileManager.Location +import javax.tools.JavaFileObject +import javax.tools.JavaFileObject.Kind +import javax.tools.{JavaCompiler => JavaxCompiler} +import 
scala.collection.mutable.HashSet import scala.concurrent.ExecutionContext -import xsbti.compile.ScalaCompiler final class CompilerCache( componentProvider: ComponentProvider, @@ -162,7 +160,7 @@ final class CompilerCache( import sbt.util.InterfaceUtil InterfaceUtil.toOption(topts.classFileManager()) match { case None => logger.error("Missing class file manager for forked Java compiler"); false - case Some(classFileManager) => + case Some(classFileManager: BloopClassFileManager) => import java.nio.file.Files val newInvalidatedEntry = AbsolutePath( Files.createTempDirectory("invalidated-forked-javac") @@ -201,6 +199,7 @@ final class CompilerCache( } finally { Paths.delete(newInvalidatedEntry) } + case _ => logger.error("Missing Bloop class file manager for forked Java compiler"); false } } } @@ -309,7 +308,9 @@ final class CompilerCache( val invalidated = { zincManager match { case m: bloop.BloopClassFileManager => m.invalidatedClassFilesSet - case _ => zincManager.invalidatedClassFiles().toSet + // Bloop uses it's own classfile manager so this should not happen + case _ => new HashSet[File]() + } } diff --git a/backend/src/main/scala/bloop/CompilerOracle.scala b/backend/src/main/scala/bloop/CompilerOracle.scala deleted file mode 100644 index 2ef484ac64..0000000000 --- a/backend/src/main/scala/bloop/CompilerOracle.scala +++ /dev/null @@ -1,70 +0,0 @@ -package bloop - -import java.io.File -import bloop.io.AbsolutePath -import xsbti.compile.Signature - -/** - * A compiler oracle is an entity that provides answers to questions that come - * up during the compilation of build targets. The oracle is an entity capable - * of synchronizing and answering questions critical for deduplicating and - * running compilations concurrently. - * - * For example, if a project wants to know something about the compilation of - * its dependencies, the oracle would be the right place to create a method - * that provides answers. 
- * - * The compiler oracle is created every time a project compilation is - * scheduled. Depending on the implementation, it can know both global - * information such as all the ongoing compilations happening in the build - * server, local data such as how a target is being compiled or both. - */ -abstract class CompilerOracle { - - /** - * Returns java sources of all those dependent projects whose compilations - * are not yet finished when build pipelining is enabled. If build pipelining - * is disabled, returns always an empty list since the class files of Java - * sources are already present in the compilation classpath. - */ - def askForJavaSourcesOfIncompleteCompilations: List[File] - - /** - * Registers a macro defined during this compilation run. It takes a full - * symbol name and associates it with the project under compilation. - */ - def registerDefinedMacro(definedMacroSymbol: String): Unit - - /** - * Collects all macro symbols that have been defined by this compilation. - */ - def collectDefinedMacroSymbols: Array[String] - - /** - * Blocks until the macro classpath for this macro is ready. If the macro has - * not been defined, we ignore it (it comes from a third-party library), - * otherwise we will wait until all dependent projects defining macros have - * finished compilation. - */ - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit - - /** - * Answers if build pipelining is enabled in the whole compilation run. - */ - def isPipeliningEnabled: Boolean - - /** - * Starts downstream compilations with the compile pickle data generated - * during the compilation of a project. This method needs to take care of - * making the pickles accessible to downstream compilations. - */ - def startDownstreamCompilations(signatures: Array[Signature]): Unit - - /** - * Collects all downstream signatures of transitive dependencies that have - * not yet finished compilation. 
Those dependencies that finished - * compilation don't need to provide their signatures because they will be - * loaded from the classes directory. - */ - def collectDownstreamSignatures(): Array[Signature] -} diff --git a/backend/src/main/scala/bloop/PartialCompileProducts.scala b/backend/src/main/scala/bloop/PartialCompileProducts.scala index 626fb9c5e1..4259e7a713 100644 --- a/backend/src/main/scala/bloop/PartialCompileProducts.scala +++ b/backend/src/main/scala/bloop/PartialCompileProducts.scala @@ -8,6 +8,5 @@ import bloop.io.AbsolutePath */ case class PartialCompileProducts( readOnlyClassesDir: AbsolutePath, - newClassesDir: AbsolutePath, - definedMacroSymbols: Array[String] + newClassesDir: AbsolutePath ) diff --git a/backend/src/main/scala/bloop/ScalaInstance.scala b/backend/src/main/scala/bloop/ScalaInstance.scala index f7a106566c..5d7f1775c6 100644 --- a/backend/src/main/scala/bloop/ScalaInstance.scala +++ b/backend/src/main/scala/bloop/ScalaInstance.scala @@ -20,17 +20,15 @@ final class ScalaInstance private ( override val version: String, override val allJars: Array[File] ) extends xsbti.compile.ScalaInstance { - override val compilerJar: File = { + + override def libraryJars(): Array[File] = { allJars - .find(f => isJar(f.getName) && hasScalaCompilerName(f.getName)) - .getOrElse( - sys.error(s"Missing compiler jar in Scala jars ${allJars.mkString(", ")}") - ) + .filter(f => isJar(f.getName) && hasScalaLibraryName(f.getName)) } - override val libraryJar: File = { + override val compilerJar: File = { allJars - .find(f => isJar(f.getName) && hasScalaLibraryName(f.getName)) + .find(f => isJar(f.getName) && hasScalaCompilerName(f.getName)) .getOrElse( sys.error(s"Missing compiler jar in Scala jars ${allJars.mkString(", ")}") ) @@ -47,10 +45,10 @@ final class ScalaInstance private ( (organization == "org.scala-lang" && version.startsWith("3.")) override lazy val loaderLibraryOnly: ClassLoader = - new URLClassLoader(Array(libraryJar.toURI.toURL), 
ScalaInstance.topClassLoader) + new URLClassLoader(libraryJars.map(_.toURI.toURL), ScalaInstance.topClassLoader) override lazy val loader: ClassLoader = { // For some exceptionally weird reason, we need to load all jars for dotty here - val jarsToLoad = if (isDotty) allJars else allJars.filterNot(_ == libraryJar) + val jarsToLoad = if (isDotty) allJars else allJars.filterNot(jar => libraryJars.contains(jar)) new URLClassLoader(jarsToLoad.map(_.toURI.toURL), loaderLibraryOnly) } diff --git a/backend/src/main/scala/bloop/scalasig/PickleMarker.scala b/backend/src/main/scala/bloop/scalasig/PickleMarker.scala deleted file mode 100644 index 3e7c66ef42..0000000000 --- a/backend/src/main/scala/bloop/scalasig/PickleMarker.scala +++ /dev/null @@ -1,60 +0,0 @@ -// Imported from twitter/rsc with minor modifications -// Copyright (c) 2017-2019 Twitter, Inc. -// Licensed under the Apache License, Version 2.0 (see LICENSE.md). -package bloop.scalasig - -import org.objectweb.asm.CustomAttribute - -final class PickleMarker extends CustomAttribute("ScalaSig", PickleMarker.bytes) - -object PickleMarker { - val bytes: Array[Byte] = { - val writer = new PickleWriter - writer.writeVarint(5) // Major pickle version - writer.writeVarint(0) // Minor pickle version - writer.writeVarint(0) - writer.toByteArray - } - - final class PickleWriter { - private var bytes = new Array[Byte](1024) - var offset = 0 - - def writeByte(x: Int): Unit = { - val requestedLen = offset + 1 - if (requestedLen > bytes.length) { - val bytes1 = new Array[Byte](requestedLen * 2) - Array.copy(bytes, 0, bytes1, 0, offset) - bytes = bytes1 - } - bytes(offset) = x.toByte - offset += 1 - } - - // NOTE: Write a 32-bit number as a base-128 varint. - // To learn more what a varint means, check out: - // https://developers.google.com/protocol-buffers/docs/encoding#varints - def writeVarint(x: Int): Unit = { - writeVarlong(x.toLong & 0x00000000ffffffffL) - } - - // NOTE: Write a 64-bit number as a base-128 varint. 
- // To learn more what a varint means, check out: - // https://developers.google.com/protocol-buffers/docs/encoding#varints - def writeVarlong(x: Long): Unit = { - def writePrefix(x: Long): Unit = { - val y = x >>> 7 - if (y != 0L) writePrefix(y) - writeByte(((x & 0x7f) | 0x80).toInt) - } - val y = x >>> 7 - if (y != 0L) writePrefix(y) - writeByte((x & 0x7f).toInt) - } - - def toByteArray: Array[Byte] = { - import java.util.Arrays - Arrays.copyOfRange(bytes, 0, offset) - } - } -} diff --git a/backend/src/main/scala/bloop/scalasig/ScalaSig.scala b/backend/src/main/scala/bloop/scalasig/ScalaSig.scala deleted file mode 100644 index 36cbe27394..0000000000 --- a/backend/src/main/scala/bloop/scalasig/ScalaSig.scala +++ /dev/null @@ -1,71 +0,0 @@ -package bloop - -import bloop.scalasig.ScalaSigWriter -import bloop.io.AbsolutePath -import bloop.scalasig.PickleMarker - -import monix.eval.Task -import java.nio.file.Files -import java.nio.charset.StandardCharsets -import bloop.logging.Logger -import bloop.logging.DebugFilter -import xsbti.compile.Signature - -object ScalaSig { - def write(picklesDir: AbsolutePath, sig: Signature, logger: Logger): Task[Unit] = { - Task { - val targetPicklePath = picklesDir.resolve(sig.name) - val targetPickleParentPath = targetPicklePath.getParent - val rawClassFileName = targetPicklePath.underlying.getFileName().toString - val dummyClassPath = targetPickleParentPath.resolve(s"${rawClassFileName}.class") - val classFileName = { - if (rawClassFileName.endsWith("package") || rawClassFileName.endsWith("package$")) { - s"${targetPickleParentPath.toString}.$rawClassFileName" - } else { - rawClassFileName - } - } - - val bytes = toBinary(rawClassFileName, sig) - logger.debug(s"Writing pickle to $dummyClassPath")(DebugFilter.Compilation) - Files.write(dummyClassPath.underlying, bytes) - () - } - } - - def toBinary(className: String, sig: Signature): Array[Byte] = { - import org.objectweb.asm._ - import org.objectweb.asm.Opcodes._ - import 
org.objectweb.asm.tree._ - val classWriter = new ClassWriter(0) - classWriter.visit( - V1_8, - ACC_PUBLIC + ACC_SUPER, - className, - null, - "java/lang/Object", - null - ) - /*if (classfile.source.nonEmpty) { - classWriter.visitSource(classfile.source, null) - }*/ - val packedScalasig = ScalaSigWriter.packScalasig(sig.content) - packedScalasig match { - case Array(packedScalasig) => - val desc = "Lscala/reflect/ScalaSignature;" - val av = classWriter.visitAnnotation(desc, true) - av.visit("bytes", packedScalasig) - av.visitEnd() - case packedScalasigChunks => - val desc = "Lscala/reflect/ScalaLongSignature;" - val av = classWriter.visitAnnotation(desc, true) - val aav = av.visitArray("bytes") - packedScalasigChunks.foreach(aav.visit("bytes", _)) - aav.visitEnd() - av.visitEnd() - } - classWriter.visitAttribute(new PickleMarker) - classWriter.visitEnd() - classWriter.toByteArray - } -} diff --git a/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala b/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala deleted file mode 100644 index aaf5984a5e..0000000000 --- a/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala +++ /dev/null @@ -1,144 +0,0 @@ -// Imported from twitter/rsc with minor modifications -// Copyright (c) 2017-2019 Twitter, Inc. -// Licensed under the Apache License, Version 2.0 (see LICENSE.md). -// NOTE: This file has been partially copy/pasted from scala/scala. -package bloop.scalasig - -// NOTE: While ClassfileReader is documented, ClassfileWriter is not. -// The implementation in Scalac seems to be inconsistent with the official -// documentation at: https://docs.scala-lang.org/sips/picked-signatures.html. 
-// * https://github.com/scala/scala/blob/v2.12.6/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala -// * https://github.com/scala/scala/blob/v2.12.6/src/reflect/scala/reflect/internal/AnnotationInfos.scala -// * https://github.com/scala/scala/blob/v2.12.6/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala -object ScalaSigWriter { - def packScalasig(unpackedScalasig: Array[Byte]): Array[String] = { - val ubytes = mapToNextModSevenBits(encode8to7(unpackedScalasig)) - if (needsScalaLongSignature(ubytes)) ubytesToArray(ubytes) - else Array(ubytesToString(ubytes)) - } - - private def encode8to7(src: Array[Byte]): Array[Byte] = { - val srclen = src.length - val dstlen = (srclen * 8 + 6) / 7 - val dst = new Array[Byte](dstlen) - var i = 0 - var j = 0 - while (i + 6 < srclen) { - var in: Int = src(i) & 0xff - dst(j) = (in & 0x7f).toByte - var out: Int = in >>> 7 - in = src(i + 1) & 0xff - dst(j + 1) = (out | (in << 1) & 0x7f).toByte - out = in >>> 6 - in = src(i + 2) & 0xff - dst(j + 2) = (out | (in << 2) & 0x7f).toByte - out = in >>> 5 - in = src(i + 3) & 0xff - dst(j + 3) = (out | (in << 3) & 0x7f).toByte - out = in >>> 4 - in = src(i + 4) & 0xff - dst(j + 4) = (out | (in << 4) & 0x7f).toByte - out = in >>> 3 - in = src(i + 5) & 0xff - dst(j + 5) = (out | (in << 5) & 0x7f).toByte - out = in >>> 2 - in = src(i + 6) & 0xff - dst(j + 6) = (out | (in << 6) & 0x7f).toByte - out = in >>> 1 - dst(j + 7) = out.toByte - i += 7 - j += 8 - } - if (i < srclen) { - var in: Int = src(i) & 0xff - dst(j) = (in & 0x7f).toByte; j += 1 - var out: Int = in >>> 7 - if (i + 1 < srclen) { - in = src(i + 1) & 0xff - dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1 - out = in >>> 6 - if (i + 2 < srclen) { - in = src(i + 2) & 0xff - dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1 - out = in >>> 5 - if (i + 3 < srclen) { - in = src(i + 3) & 0xff - dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1 - out = in >>> 4 - if (i + 4 < srclen) { - in = src(i + 
4) & 0xff - dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1 - out = in >>> 3 - if (i + 5 < srclen) { - in = src(i + 5) & 0xff - dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1 - out = in >>> 2 - } - } - } - } - } - if (j < dstlen) dst(j) = out.toByte - } - dst - } - - private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = { - var i = 0 - val srclen = src.length - while (i < srclen) { - val in = src(i) - src(i) = (if (in == 0x7f) 0.toByte else (in + 1).toByte) - i += 1 - } - src - } - - private def needsScalaLongSignature(src: Array[Byte]): Boolean = { - var i = 0 - var numZeros = 0 - while (i < src.length) { - if (src(i) == 0) numZeros += 1 - i += 1 - } - (src.length + numZeros) >= 65536 - } - - private def ubytesToString(ubytes: Array[Byte]): String = { - val chars = new Array[Char](ubytes.length) - var i = 0 - while (i < ubytes.length) { - val b: Byte = ubytes(i) - assert((b & ~0x7f) == 0) - chars(i) = b.asInstanceOf[Char] - i += 1 - } - new String(chars) - } - - private def ubytesToArray(ubytes: Array[Byte]): Array[String] = { - var strs: List[String] = Nil - var prevOffset = 0 - var offset = 0 - var encLength = 0 - while (offset < ubytes.length) { - val deltaEncLength = if (ubytes(offset) == 0) 2 else 1 - val newEncLength = encLength.toLong + deltaEncLength - if (newEncLength >= 65535) { - val ba = ubytes.slice(prevOffset, offset) - strs ::= ubytesToString(ba) - encLength = 0 - prevOffset = offset - } else { - encLength += deltaEncLength - offset += 1 - } - } - if (prevOffset < offset) { - assert(offset == ubytes.length) - val ba = ubytes.slice(prevOffset, offset) - strs ::= ubytesToString(ba) - } - strs.reverse.toArray - } -} diff --git a/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala b/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala index 363a5c9d1d..833e5f2f12 100644 --- a/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala +++ 
b/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala @@ -63,8 +63,8 @@ object BloopComponentCompiler { val (isDotty, organization, version) = scalaInstance match { case instance: BloopScalaInstance => if (instance.isDotty) (true, instance.organization, instance.version) - else (false, "ch.epfl.scala", latestVersion) - case instance: ScalaInstance => (false, "ch.epfl.scala", latestVersion) + else (false, "org.scala-sbt", latestVersion) + case instance: ScalaInstance => (false, "org.scala-sbt", latestVersion) } val bridgeId = compilerBridgeId(scalaInstance.version) @@ -102,24 +102,14 @@ object BloopComponentCompiler { scheduler: ExecutionContext ) extends CompilerBridgeProvider { - private def is213ThatNeedsPreviousZinc(scalaVersion: String): Boolean = { - scalaVersion.startsWith("2.13.0") || - scalaVersion.startsWith("2.13.1") || - scalaVersion.startsWith("2.13.2") - } - /** * Defines a richer interface for Scala users that want to pass in an explicit module id. * * Note that this method cannot be defined in [[CompilerBridgeProvider]] because [[ModuleID]] * is a Scala-defined class to which the compiler bridge cannot depend on. 
*/ - private def compiledBridge(bridgeSources0: ModuleID, scalaInstance: ScalaInstance): File = { + private def compiledBridge(bridgeSources: ModuleID, scalaInstance: ScalaInstance): File = { val scalaVersion = scalaInstance.version() - val bridgeSources = - if (is213ThatNeedsPreviousZinc(scalaVersion)) - bridgeSources0.withRevision("1.3.0-M4+42-5daa8ed7") - else bridgeSources0 val raw = new RawCompiler(scalaInstance, ClasspathOptionsUtil.auto, logger) val zinc = new BloopComponentCompiler(raw, manager, bridgeSources, logger, scheduler) logger.debug(s"Getting $bridgeSources for Scala ${scalaInstance.version}")( diff --git a/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala b/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala index 4d53abd51a..34c10ddd38 100644 --- a/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala +++ b/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala @@ -42,6 +42,12 @@ object BloopZincLibraryManagement { scheduler ) val loader = Some(new ClassLoaderCache(new URLClassLoader(new Array(0)))) - new AnalyzingCompiler(scalaInstance, compilerBridgeProvider, _ => (), loader) + new AnalyzingCompiler( + scalaInstance, + compilerBridgeProvider, + ClasspathOptionsUtil.boot(), + _ => (), + loader + ) } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala index 8244f92469..711162eeff 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala @@ -4,7 +4,6 @@ package sbt.internal.inc.bloop import java.io.File import java.util.concurrent.CompletableFuture -import bloop.{CompileMode, CompilerOracle} import bloop.reporter.ZincReporter import bloop.logging.ObservedLogger import bloop.tracing.BraveTracer @@ -44,13 +43,13 @@ object BloopZincCompiler { */ def compile( in: 
Inputs, - compileMode: CompileMode, reporter: ZincReporter, logger: ObservedLogger[_], uniqueInputs: UniqueCompileInputs, manager: ClassFileManager, cancelPromise: Promise[Unit], - tracer: BraveTracer + tracer: BraveTracer, + classpathOptions: ClasspathOptions ): Task[CompileResult] = { val config = in.options() val setup = in.setup() @@ -81,7 +80,6 @@ object BloopZincCompiler { skip, incrementalCompilerOptions, extraOptions, - compileMode, manager, cancelPromise, tracer @@ -109,7 +107,6 @@ object BloopZincCompiler { skip: Boolean = false, incrementalOptions: IncOptions, extra: List[(String, String)], - compileMode: CompileMode, manager: ClassFileManager, cancelPromise: Promise[Unit], tracer: BraveTracer @@ -126,12 +123,12 @@ object BloopZincCompiler { if (skip) Task.now(CompileResult.of(prev, config.currentSetup, false)) else { val setOfSources = sources.toSet - val compiler = BloopHighLevelCompiler(config, reporter, logger, tracer) + val compiler = BloopHighLevelCompiler(config, reporter, logger, tracer, classpathOptions) val lookup = new BloopLookup(config, previousSetup, logger) val analysis = invalidateAnalysisFromSetup(config.currentSetup, previousSetup, incrementalOptions.ignoredScalacOptions(), setOfSources, prev, manager, logger) // Scala needs the explicit type signature to infer the function type arguments - val compile: (Set[File], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] = compiler.compile(_, _, _, _, compileMode, cancelPromise) + val compile: (Set[File], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] = compiler.compile(_, _, _, _, cancelPromise, classpathOptions) BloopIncremental .compile( setOfSources, @@ -143,7 +140,6 @@ object BloopZincCompiler { logger, reporter, config.incOptions, - compileMode, manager, tracer, HydraSupport.isEnabled(config.compiler.scalaInstance()) @@ -264,7 +260,6 @@ object BloopZincCompiler { MixedAnalyzingCompiler.config( sources, classpath, - classpathOptions, compileSetup, 
progress, previousAnalysis, diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala index 38f97096eb..fb28441ae6 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala @@ -31,15 +31,11 @@ import sbt.internal.inc.Analysis import sbt.internal.inc.Compilation import sbt.internal.inc.SourceInfos -import bloop.CompileMode -import xsbti.compile.Signature - trait IBloopAnalysisCallback extends xsbti.AnalysisCallback { def get: Analysis } final class BloopAnalysisCallback( - compileMode: CompileMode, internalBinaryToSourceClassName: String => Option[String], internalSourceToClassNamesMap: File => Set[String], externalAPI: (File, String) => Option[AnalyzedClass], @@ -365,22 +361,4 @@ final class BloopAnalysisCallback( override def dependencyPhaseCompleted(): Unit = () override def classesInOutputJar(): java.util.Set[String] = ju.Collections.emptySet() - override def definedMacro(symbolName: String): Unit = { - compileMode.oracle.registerDefinedMacro(symbolName) - } - - override def invokedMacro(invokedMacroSymbol: String): Unit = { - compileMode.oracle.blockUntilMacroClasspathIsReady(invokedMacroSymbol) - } - - override def isPipeliningEnabled(): Boolean = compileMode.oracle.isPipeliningEnabled - override def downstreamSignatures(): Array[Signature] = - compileMode.oracle.collectDownstreamSignatures() - override def definedSignatures(signatures: Array[Signature]): Unit = { - compileMode.oracle.startDownstreamCompilations(signatures) - } - - override def invalidatedClassFiles(): Array[File] = { - manager.invalidatedClassFiles() - } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala index 
a9825d9bff..433de064a8 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala @@ -7,7 +7,7 @@ import java.util.concurrent.CompletableFuture import bloop.reporter.ZincReporter import bloop.logging.ObservedLogger -import bloop.{CompileMode, JavaSignal} +import bloop.JavaSignal import bloop.tracing.BraveTracer import monix.eval.Task @@ -63,8 +63,8 @@ final class BloopHighLevelCompiler( changes: DependencyChanges, callback: AnalysisCallback, classfileManager: ClassFileManager, - compileMode: CompileMode, - cancelPromise: Promise[Unit] + cancelPromise: Promise[Unit], + classpathOptions: ClasspathOptions ): Task[Unit] = { def timed[T](label: String)(t: => T): T = { tracer.trace(label) { _ => @@ -91,14 +91,9 @@ final class BloopHighLevelCompiler( reporter.reportStartIncrementalCycle(includedSources, outputDirs) } - // Note `pickleURI` has already been used to create the analysis callback in `BloopZincCompiler` - val (pipeline: Boolean, batches: Option[Int], completeJava: Promise[Unit], fireJavaCompilation: Task[JavaSignal], separateJavaAndScala: Boolean) = { - compileMode match { - case _: CompileMode.Sequential => (false, None, JavaCompleted, Task.now(JavaSignal.ContinueCompilation), false) - case CompileMode.Pipelined(completeJava, _, fireJavaCompilation, _, separateJavaAndScala) => - (true, None, completeJava, fireJavaCompilation, separateJavaAndScala) - } - } + val completeJava = JavaCompleted + val separateJavaAndScala = false + val fireJavaCompilation = Task.now(JavaSignal.ContinueCompilation) // Complete empty java promise if there are no java sources if (javaSources.isEmpty && !completeJava.isCompleted) @@ -108,13 +103,11 @@ final class BloopHighLevelCompiler( if (scalaSources.isEmpty) Task.now(()) else { val sources = { - if (separateJavaAndScala) { + if (separateJavaAndScala || setup.order == CompileOrder.Mixed) { // No matter if 
it's scala->java or mixed, we populate java symbols from sources - val transitiveJavaSources = compileMode.oracle.askForJavaSourcesOfIncompleteCompilations - includedSources ++ transitiveJavaSources.filterNot(_.getName == "routes.java") + includedSources } else { - if (setup.order == CompileOrder.Mixed) includedSources - else scalaSources + scalaSources } } @@ -124,11 +117,11 @@ final class BloopHighLevelCompiler( throw new CompileFailed(new Array(0), s"Expected Scala compiler jar in Scala instance containing ${scalac.scalaInstance.allJars().mkString(", ")}", new Array(0)) } - if (scalac.scalaInstance.libraryJar() == null) { + if (scalac.scalaInstance.libraryJars().isEmpty) { throw new CompileFailed(new Array(0), s"Expected Scala library jar in Scala instance containing ${scalac.scalaInstance.allJars().mkString(", ")}", new Array(0)) } - new CompilerArguments(scalac.scalaInstance, config.classpathOptions) + new CompilerArguments(scalac.scalaInstance, classpathOptions) } def compileSources( @@ -160,10 +153,7 @@ final class BloopHighLevelCompiler( } } - batches match { - case Some(batches) => sys.error("Parallel compilation is not yet supported!") - case None => compileSequentially - } + compileSequentially } } @@ -190,34 +180,20 @@ final class BloopHighLevelCompiler( val combinedTasks = { if (separateJavaAndScala) { - val compileJavaSynchronized = { - fireJavaCompilation.flatMap { - case JavaSignal.ContinueCompilation => compileJava - case JavaSignal.FailFastCompilation(failedProjects) => - throw new StopPipelining(failedProjects) - } - } - if (javaSources.isEmpty) compileScala else { if (setup.order == CompileOrder.JavaThenScala) { - Task.gatherUnordered(List(compileJavaSynchronized, compileScala)).map(_ => ()) + Task.gatherUnordered(List(compileJava, compileScala)).map(_ => ()) } else { - compileScala.flatMap(_ => compileJavaSynchronized) + compileScala.flatMap(_ => compileJava) } } } else { // Note that separate java and scala is not enabled under pipelining - 
fireJavaCompilation.flatMap { - case JavaSignal.ContinueCompilation => - if (setup.order == CompileOrder.JavaThenScala) { - compileJava.flatMap(_ => compileScala) - } else { - compileScala.flatMap(_ => compileJava) - } - - case JavaSignal.FailFastCompilation(failedProjects) => - throw new StopPipelining(failedProjects) + if (setup.order == CompileOrder.JavaThenScala) { + compileJava.flatMap(_ => compileScala) + } else { + compileScala.flatMap(_ => compileJava) } } } @@ -236,10 +212,10 @@ final class BloopHighLevelCompiler( } object BloopHighLevelCompiler { - def apply(config: CompileConfiguration, reporter: ZincReporter, logger: ObservedLogger[_], tracer: BraveTracer): BloopHighLevelCompiler = { + def apply(config: CompileConfiguration, reporter: ZincReporter, logger: ObservedLogger[_], tracer: BraveTracer, classpathOptions: ClasspathOptions): BloopHighLevelCompiler = { val (searchClasspath, entry) = MixedAnalyzingCompiler.searchClasspathAndLookup(config) val scalaCompiler = config.compiler.asInstanceOf[AnalyzingCompiler] - val javaCompiler = new AnalyzingJavaCompiler(config.javac, config.classpath, config.compiler.scalaInstance, config.classpathOptions, entry, searchClasspath) + val javaCompiler = new AnalyzingJavaCompiler(config.javac, config.classpath, config.compiler.scalaInstance, classpathOptions, entry, searchClasspath) new BloopHighLevelCompiler(scalaCompiler, javaCompiler, config, reporter, logger, tracer) } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala index caefd3aed5..121139a322 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala @@ -4,11 +4,9 @@ package sbt.internal.inc.bloop.internal import java.io.File import java.util.concurrent.CompletableFuture -import bloop.CompilerOracle import 
bloop.UniqueCompileInputs import bloop.reporter.ZincReporter import bloop.tracing.BraveTracer -import bloop.CompileMode import monix.eval.Task import sbt.internal.inc.{Analysis, InvalidationProfiler, Lookup, Stamper, Stamps} @@ -31,7 +29,6 @@ object BloopIncremental { log: Logger, reporter: ZincReporter, options: IncOptions, - mode: CompileMode, manager: ClassFileManager, tracer: BraveTracer, isHydraEnabled: Boolean @@ -53,9 +50,9 @@ object BloopIncremental { val internalSourceToClassNamesMap: File => Set[String] = (f: File) => previousRelations.classNames(f) val builder: () => IBloopAnalysisCallback = { - if (!isHydraEnabled) () => new BloopAnalysisCallback(mode, internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) + if (!isHydraEnabled) () => new BloopAnalysisCallback(internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) else - () => new ConcurrentAnalysisCallback(mode, internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) + () => new ConcurrentAnalysisCallback(internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) } // We used to catch for `CompileCancelled`, but we prefer to propagate it so that Bloop catches it compileIncremental(sources, uniqueInputs, lookup, previous, current, compile, builder, reporter, log, output, options, manager, tracer) diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala index 3065998af3..2a8b684250 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala @@ -2,7 +2,6 @@ package sbt.internal.inc.bloop.internal import java.io.File -import _root_.bloop.CompilerOracle import 
_root_.bloop.UniqueCompileInputs import _root_.bloop.reporter.ZincReporter import _root_.bloop.tracing.BraveTracer diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala index 21535e1402..1dd0e9a181 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala @@ -31,9 +31,6 @@ import sbt.internal.inc.Analysis import sbt.internal.inc.Compilation import sbt.internal.inc.SourceInfos -import bloop.CompileMode -import xsbti.compile.Signature - /** * This class provides a thread-safe implementation of `xsbti.AnalysisCallback` which is required to compile with the * Triplequote Hydra compiler. @@ -45,7 +42,6 @@ import xsbti.compile.Signature * IMPORTANT: All modifications made to BloopAnalysisCallback` must be replicated here. */ final class ConcurrentAnalysisCallback( - compileMode: CompileMode, internalBinaryToSourceClassName: String => Option[String], internalSourceToClassNamesMap: File => Set[String], externalAPI: (File, String) => Option[AnalyzedClass], @@ -378,22 +374,4 @@ final class ConcurrentAnalysisCallback( override def dependencyPhaseCompleted(): Unit = () override def classesInOutputJar(): java.util.Set[String] = ju.Collections.emptySet() - override def definedMacro(symbolName: String): Unit = { - compileMode.oracle.registerDefinedMacro(symbolName) - } - - override def invokedMacro(invokedMacroSymbol: String): Unit = { - compileMode.oracle.blockUntilMacroClasspathIsReady(invokedMacroSymbol) - } - - override def isPipeliningEnabled(): Boolean = compileMode.oracle.isPipeliningEnabled - override def downstreamSignatures(): Array[Signature] = - compileMode.oracle.collectDownstreamSignatures() - override def definedSignatures(signatures: Array[Signature]): Unit = { - 
compileMode.oracle.startDownstreamCompilations(signatures) - } - - override def invalidatedClassFiles(): Array[File] = { - manager.invalidatedClassFiles() - } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala deleted file mode 100644 index 93d2b44a26..0000000000 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala +++ /dev/null @@ -1,15 +0,0 @@ -package sbt.internal.inc.bloop.internal - -/** - * Defines a stop point for pipelined compilation. - * - * Pipelining forces the compilation of dependent modules while dependent modules are being - * compiled. If there is an error in any of the previous Scala projects, the compilation - * of the projects that depend on the failed project need to fail fast. - * - * `StopPipelining` is the way to stop pipelined compilation from the guts of Zinc. We throw - * this exception from deep inside `BloopHighLevelCompiler`, and then we catch it in - * `bloop.Compiler` and translate it to a `Compiler.Blocked` result. 
- */ -final class StopPipelining(val failedProjectNames: List[String]) - extends Exception(s"Pipelining stopped, projects ${failedProjectNames} failed to compile.") diff --git a/backend/src/test/scala/bloop/CompilerCacheSpec.scala b/backend/src/test/scala/bloop/CompilerCacheSpec.scala index 4af1be6ee3..d83f182de5 100644 --- a/backend/src/test/scala/bloop/CompilerCacheSpec.scala +++ b/backend/src/test/scala/bloop/CompilerCacheSpec.scala @@ -62,7 +62,6 @@ class CompilerCacheSpec { val classFileManager = new ClassFileManager { override def delete(classes: Array[File]): Unit = () - override def invalidatedClassFiles(): Array[File] = Array.empty override def generated(classes: Array[File]): Unit = () override def complete(success: Boolean): Unit = () } diff --git a/benchmark-bridge b/benchmark-bridge index aae5156dcb..8489b0de9f 160000 --- a/benchmark-bridge +++ b/benchmark-bridge @@ -1 +1 @@ -Subproject commit aae5156dcb41f55b24836f33da28d835fb5b9265 +Subproject commit 8489b0de9ff82d7bbe2450da07b938fabb6605b1 diff --git a/bin/run-benchmarks.sh b/bin/run-benchmarks.sh index 50f12f511d..871b586136 100755 --- a/bin/run-benchmarks.sh +++ b/bin/run-benchmarks.sh @@ -91,23 +91,23 @@ main() { #"-wi 4 -i 4 -f1 -t1 -p project=lichess -p projectName=lila-test" #"-wi 15 -i 10 -f1 -t1 -p project=sbt -p projectName=sbtRoot" #"-wi 8 -i 5 -f1 -t1 -p project=frontend -p projectName=root-test" - #"-wi 8 -i 5 -f1 -t1 -p project=finagle -p projectName=finagle-test" + "-wi 8 -i 5 -f1 -t1 -p project=finagle -p projectName=finagle-test" "-wi 10 -i 10 -f1 -t1 -p project=algebird -p projectName=algebird-test" - #"-wi 20 -i 10 -f1 -t1 -p project=scalatra -p projectName=scalatra-project-test" + "-wi 20 -i 10 -f1 -t1 -p project=scalatra -p projectName=scalatra-project-test" "-wi 15 -i 5 -f1 -t1 -p project=atlas -p projectName=root-test" "-wi 20 -i 10 -f1 -t1 -p project=grid -p projectName=grid-test" - #"-wi 7 -i 5 -f1 -t1 -p project=akka -p projectName=akka-test" - #"-wi 10 -i 5 -f1 -t1 -p 
project=circe -p projectName=circe-test" + "-wi 7 -i 5 -f1 -t1 -p project=akka -p projectName=akka-test" + "-wi 10 -i 5 -f1 -t1 -p project=circe -p projectName=circe-test" #"-wi 10 -i 5 -f1 -t1 -p project=linkerd -p projectName=all-test" - #"-wi 20 -i 10 -f1 -t1 -p project=summingbird -p projectName=summingbird-test" + "-wi 20 -i 10 -f1 -t1 -p project=summingbird -p projectName=summingbird-test" "-wi 5 -i 5 -f1 -t1 -p project=http4s -p projectName=root-test" #"-wi 15 -i 10 -f1 -t1 -p project=gatling -p projectName=gatling-parent-test" #"-wi 5 -i 5 -f1 -t1 -p project=marathon -p projectName=marathon-test" #"-wi 15 -i 5 -f1 -t1 -p project=coursier -p projectName=coursier-repo-test" #"-wi 10 -i 5 -f1 -t1 -p project=prisma -p projectName=root-test" - #"-wi 5 -i 3 -f1 -t1 -p project=cats -p projectName=cats-test" # compiles hot in 3 minutes + # "-wi 5 -i 3 -f1 -t1 -p project=cats -p projectName=cats-test" # compiles hot in 3 minutes #"-wi 2 -i 3 -f1 -t1 -p project=scalding -p projectName=scalding-test" - "-wi 2 -i 3 -f1 -t1 -p project=scio -p projectName=scio+test" + #"-wi 2 -i 3 -f1 -t1 -p project=scio -p projectName=scio+test" ) JAVA_HOMES=( @@ -122,7 +122,7 @@ main() { for java_home in "${JAVA_HOMES[@]}"; do for benchmark in "${SBT_BLOOP_BENCHMARKS[@]}"; do - SBT_COMMANDS+=("$JMH_CMD .*Hot(Bloop|PipelinedBloop|Sbt)Benchmark.* $benchmark -jvm $java_home") + SBT_COMMANDS+=("$JMH_CMD .*Hot(Bloop|Sbt)Benchmark.* $benchmark -jvm $java_home") done done diff --git a/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala b/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala index 546d923787..79cf74db7b 100644 --- a/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala +++ b/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala @@ -12,7 +12,7 @@ import bloop.io.Environment.lineSeparator import bloop.io.ServerHandle import bloop.util.JavaRuntime import bloop.bsp.BloopBspDefinitions.BloopExtraBuildParams -import bloop.{CompileMode, Compiler, ScalaInstance} +import 
bloop.{Compiler, ScalaInstance} import bloop.cli.{Commands, ExitStatus, Validate} import bloop.dap.{DebugServerLogger, BloopDebuggeeRunner} import bloop.data.{ClientInfo, JdkConfig, Platform, Project, WorkspaceSettings} diff --git a/frontend/src/main/scala/bloop/data/ClientInfo.scala b/frontend/src/main/scala/bloop/data/ClientInfo.scala index db6ba98eba..4db020a0ab 100644 --- a/frontend/src/main/scala/bloop/data/ClientInfo.scala +++ b/frontend/src/main/scala/bloop/data/ClientInfo.scala @@ -285,10 +285,10 @@ object ClientInfo { val attrs = Files.readAttributes(clientDir.underlying, classOf[BasicFileAttributes]) val isOldDir = attrs.creationTime.toInstant.isBefore(deletionThresholdInstant) - val isWhitelisted = CliClientInfo.isStableDirName(dirName) || + val isAllowed = CliClientInfo.isStableDirName(dirName) || connectedBspClientIds.exists(clientId => dirName.endsWith(s"-$clientId")) - if (isWhitelisted || !isOldDir) () + if (isAllowed || !isOldDir) () else { out.println(s"Deleting orphan directory ${clientDir}") bloop.io.Paths.delete(clientDir) diff --git a/frontend/src/main/scala/bloop/engine/Interpreter.scala b/frontend/src/main/scala/bloop/engine/Interpreter.scala index 4658279c5a..d53ffc3b5e 100644 --- a/frontend/src/main/scala/bloop/engine/Interpreter.scala +++ b/frontend/src/main/scala/bloop/engine/Interpreter.scala @@ -1,6 +1,5 @@ package bloop.engine -import bloop.CompileMode import bloop.bsp.BspServer import bloop.cli._ import bloop.cli.completion.{Case, Mode} diff --git a/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala b/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala index 7651add37b..00bfdf5553 100644 --- a/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala +++ b/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala @@ -1,7 +1,6 @@ package bloop.engine.caches import bloop.Compiler -import bloop.CompilerOracle import bloop.CompileProducts import bloop.data.Project import 
bloop.io.AbsolutePath diff --git a/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala b/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala index a9a7895106..dbdd5f2e3f 100644 --- a/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala +++ b/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala @@ -227,10 +227,8 @@ object ResultsCache { val dummy = ObservedLogger.dummy(logger, ExecutionContext.ioScheduler) val reporter = new LogReporter(p, dummy, cwd, ReporterConfig.defaultFormat) - // TODO: Figure out a way to populate macros from previous run after restart - val ms = new Array[String](0) val products = - CompileProducts(classesDir, classesDir, r, r, Set.empty, Map.empty, ms) + CompileProducts(classesDir, classesDir, r, r, Set.empty, Map.empty) val bundle = ResultBundle( Result.Success(inputs, reporter, products, 0L, dummyTasks, false, false), Some(LastSuccessfulResult(inputs, products, Task.now(()))), diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index fc03d2b706..13353e84f2 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -16,7 +16,6 @@ import bloop.{ CompileBackgroundTasks, CompileExceptions, CompileInputs, - CompileMode, CompileOutPaths, CompileProducts, Compiler @@ -87,13 +86,6 @@ object CompileTask { bundle.prepareSourcesAndInstance match { case Left(earlyResultBundle) => - graphInputs.pipelineInputs match { - case None => () - case Some(inputs) => - inputs.irPromise.trySuccess(new Array(0)) - inputs.finishedCompilation.trySuccess(None) - inputs.completeJava.trySuccess(()) - } compileProjectTracer.terminate() Task.now(earlyResultBundle) case Right(CompileSourcesAndInstance(sources, instance, javaOnly)) => @@ -143,7 +135,6 @@ object CompileTask { previousResult, reporter, logger, - configuration.mode, graphInputs.dependentResults, 
cancelCompilation, compileProjectTracer, @@ -166,9 +157,6 @@ object CompileTask { waitOnReadClassesDir.flatMap { _ => // Only when the task is finished, we kickstart the compilation inputs.flatMap(inputs => Compiler.compile(inputs)).map { result => - // Post-compilation hook to complete/validate pipelining state - runPipeliningBookkeeping(graphInputs, result, javaOnly, logger) - def runPostCompilationTasks( backgroundTasks: CompileBackgroundTasks ): CancelableFuture[Unit] = { @@ -257,7 +245,7 @@ object CompileTask { } val client = state.client - CompileGraph.traverse(dag, client, store, setup(_), compile(_), pipeline).flatMap { pdag => + CompileGraph.traverse(dag, client, store, setup(_), compile(_)).flatMap { pdag => val partialResults = Dag.dfs(pdag) val finalResults = partialResults.map(r => PartialCompileResult.toFinalResult(r)) Task.gatherUnordered(finalResults).map(_.flatten).flatMap { results => @@ -328,61 +316,13 @@ object CompileTask { } } - case class ConfiguredCompilation(mode: CompileMode, scalacOptions: List[String]) + case class ConfiguredCompilation(scalacOptions: List[String]) private def configureCompilation( project: Project, graphInputs: CompileGraph.Inputs, out: CompileOutPaths ): ConfiguredCompilation = { - graphInputs.pipelineInputs match { - case Some(inputs) => - val scalacOptions = project.scalacOptions - val newMode = CompileMode.Pipelined( - inputs.completeJava, - inputs.finishedCompilation, - inputs.transitiveJavaSignal, - graphInputs.oracle, - inputs.separateJavaAndScala - ) - ConfiguredCompilation(newMode, scalacOptions) - case None => - val newMode = CompileMode.Sequential(graphInputs.oracle) - ConfiguredCompilation(newMode, project.scalacOptions) - } - } - - private def runPipeliningBookkeeping( - inputs: CompileGraph.Inputs, - result: Compiler.Result, - javaOnly: Boolean, - logger: Logger - ): Unit = { - val projectName = inputs.bundle.project.name - // Avoid deadlocks in case pipelining is disabled in the Zinc bridge - 
inputs.pipelineInputs match { - case None => () - case Some(pipelineInputs) => - result match { - case Compiler.Result.NotOk(_) => - // If error, try to set failure in IR promise; if already completed ignore - pipelineInputs.irPromise.tryFailure(CompileExceptions.FailedOrCancelledPromise); () - case result => - // Complete finished compilation promise with products if success or empty - result match { - case s: Compiler.Result.Success => - pipelineInputs.finishedCompilation.success(Some(s.products)) - case Compiler.Result.Empty => - pipelineInputs.finishedCompilation.trySuccess(None) - case _ => - pipelineInputs.finishedCompilation.tryFailure(CompileExceptions.CompletePromise) - } - - val completed = pipelineInputs.irPromise.tryFailure(CompileExceptions.CompletePromise) - if (completed && !javaOnly) { - logger.warn(s"The project $projectName didn't use pipelined compilation.") - } - } - } + ConfiguredCompilation(project.scalacOptions) } private def populateNewReadOnlyClassesDir( diff --git a/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala b/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala index e00f910f47..55710b703f 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala @@ -68,7 +68,7 @@ object Tasks { val opts = ClasspathOptionsUtil.repl val options = project.scalacOptions :+ "-Xnojline" // We should by all means add better error handling here! 
- compiler.console(entries, options, opts, "", "", state.logger)(Some(loader)) + compiler.console(entries, options, "", "", state.logger)(Some(loader)) case None => logger.error(s"Missing Scala configuration on project '${project.name}'") } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala index d31ad3a2f2..05cf512eab 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala @@ -5,7 +5,7 @@ import bloop.engine.Feedback import bloop.engine.{Dag, ExecutionContext} import bloop.io.{AbsolutePath, Paths} import bloop.io.ByteHasher -import bloop.{Compiler, CompilerOracle, ScalaInstance} +import bloop.{Compiler, ScalaInstance} import bloop.logging.{Logger, ObservedLogger, LoggerAction} import bloop.reporter.{ObservedReporter, ReporterAction} import bloop.tracing.BraveTracer @@ -62,8 +62,6 @@ case object CancelledCompileBundle extends CompileBundle * dependent projects, which is required to create a full classpath. * @param javaSources A list of Java sources in the project. * @param scalaSources A list of Scala sources in the project. - * @param oracleInputs The compiler oracle inputs are the main input to the - * compilation task called by [[CompileGraph]]. * @param cancelCompilation A promise that can be completed to cancel the compilation. * @param reporter A reporter instance that will register every reporter action * produced by the compilation started by this compile bundle. 
diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala index c372fb9497..7585cb268e 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala @@ -5,12 +5,10 @@ import bloop.engine.Dag import bloop.PartialCompileProducts import bloop.CompileProducts import bloop.data.Project -import bloop.CompilerOracle import java.io.File import xsbti.compile.PreviousResult import scala.concurrent.Promise import bloop.JavaSignal -import xsbti.compile.Signature object CompileDefinitions { type ProjectId = String @@ -25,11 +23,4 @@ object CompileDefinitions { dependentProducts: Map[Project, BundleProducts] ) - case class PipelineInputs( - irPromise: Promise[Array[Signature]], - finishedCompilation: Promise[Option[CompileProducts]], - completeJava: Promise[Unit], - transitiveJavaSignal: Task[JavaSignal], - separateJavaAndScala: Boolean - ) } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala index c3b9494b2a..2d408a7d32 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala @@ -15,7 +15,7 @@ import bloop.util.SystemProperties import bloop.engine.{Dag, Leaf, Parent, Aggregate, ExecutionContext} import bloop.reporter.ReporterAction import bloop.logging.{Logger, ObservedLogger, LoggerAction, DebugFilter} -import bloop.{Compiler, CompilerOracle, JavaSignal, CompileProducts} +import bloop.{Compiler, JavaSignal, CompileProducts} import bloop.engine.caches.LastSuccessfulResult import bloop.UniqueCompileInputs import bloop.PartialCompileProducts @@ -30,7 +30,6 @@ import xsbti.compile.PreviousResult import scala.concurrent.Promise import 
scala.util.{Failure, Success} -import xsbti.compile.Signature import scala.collection.mutable import java.{util => ju} import bloop.CompileOutPaths @@ -47,39 +46,14 @@ object CompileGraph { case class Inputs( bundle: SuccessfulCompileBundle, - oracle: CompilerOracle, - pipelineInputs: Option[PipelineInputs], dependentResults: Map[File, PreviousResult] ) - /** - * Turns a dag of projects into a task that returns a dag of compilation results - * that can then be used to debug the evaluation of the compilation within Monix - * and access the compilation results received from Zinc. - * - * @param dag The dag of projects to be compiled. - * @return A task that returns a dag of compilation results. - */ - def traverse( - dag: Dag[Project], - client: ClientInfo, - store: CompileClientStore, - setup: BundleInputs => Task[CompileBundle], - compile: Inputs => Task[ResultBundle], - pipeline: Boolean - ): CompileTraversal = { - /* We use different traversals for normal and pipeline compilation because the - * pipeline traversal has an small overhead (2-3%) for some projects. 
Check - * https://benchs.scala-lang.org/dashboard/snapshot/sLrZTBfntTxMWiXJPtIa4DIrmT0QebYF */ - if (pipeline) pipelineTraversal(dag, client, store, setup, compile) - else normalTraversal(dag, client, store, setup, compile) - } - private final val JavaContinue = Task.now(JavaSignal.ContinueCompilation) private def partialSuccess( bundle: SuccessfulCompileBundle, result: ResultBundle - ): PartialSuccess = PartialSuccess(bundle, None, Task.now(result)) + ): PartialSuccess = PartialSuccess(bundle, Task.now(result)) private def blockedBy(dag: Dag[PartialCompileResult]): Option[Project] = { def blockedFromResults(results: List[PartialCompileResult]): Option[Project] = { @@ -294,7 +268,7 @@ object CompileGraph { */ val obtainResultFromDeduplication = runningCompilationTask.map { results => PartialCompileResult.mapEveryResult(results) { - case s @ PartialSuccess(bundle, _, compilerResult) => + case s @ PartialSuccess(bundle, compilerResult) => val newCompilerResult = compilerResult.flatMap { results => results.fromCompiler match { case s: Compiler.Result.Success => @@ -404,14 +378,14 @@ object CompileGraph { import scala.collection.mutable /** - * Traverses the dag of projects in a normal way. + * Turns a dag of projects into a task that returns a dag of compilation results + * that can then be used to debug the evaluation of the compilation within Monix + * and access the compilation results received from Zinc. * - * @param dag is the dag of projects. - * @param computeBundle is the function that sets up the project on every node. - * @param compile is the task we use to compile on every node. + * @param dag The dag of projects to be compiled. * @return A task that returns a dag of compilation results. 
*/ - private def normalTraversal( + def traverse( dag: Dag[Project], client: ClientInfo, store: CompileClientStore, @@ -444,8 +418,7 @@ object CompileGraph { case Leaf(project) => val bundleInputs = BundleInputs(project, dag, Map.empty) setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val oracle = new SimpleOracle - compile(Inputs(bundle, oracle, None, Map.empty)).map { results => + compile(Inputs(bundle, Map.empty)).map { results => results.fromCompiler match { case Compiler.Result.Ok(_) => Leaf(partialSuccess(bundle, results)) case _ => Leaf(toPartialFailure(bundle, results)) @@ -493,8 +466,7 @@ object CompileGraph { val resultsMap = dependentResults.toMap val bundleInputs = BundleInputs(project, dag, dependentProducts.toMap) setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val oracle = new SimpleOracle - val inputs = Inputs(bundle, oracle, None, resultsMap) + val inputs = Inputs(bundle, resultsMap) compile(inputs).map { results => results.fromCompiler match { case Compiler.Result.Ok(_) => @@ -514,233 +486,6 @@ object CompileGraph { loop(dag) } - /** - * Traverses the dag of projects in such a way that allows compilation pipelining. - * - * Note that to use build pipelining, the compilation task needs to have a pipelining - * implementation where the pickles are generated and the promise in [[Inputs]] completed. - * - * @param dag is the dag of projects. - * @param computeBundle is the function that sets up the project on every node. - * @param compile is the function that compiles every node, returning a Task. - * @return A task that returns a dag of compilation results. 
- */ - private def pipelineTraversal( - dag: Dag[Project], - client: ClientInfo, - store: CompileClientStore, - computeBundle: BundleInputs => Task[CompileBundle], - compile: Inputs => Task[ResultBundle] - ): CompileTraversal = { - val tasks = new scala.collection.mutable.HashMap[Dag[Project], CompileTraversal]() - def register(k: Dag[Project], v: CompileTraversal): CompileTraversal = { - val toCache = store.findPreviousTraversalOrAddNew(k, v).getOrElse(v) - tasks.put(k, toCache) - toCache - } - - def loop(dag: Dag[Project]): CompileTraversal = { - tasks.get(dag) match { - case Some(task) => task - case None => - val task = dag match { - case Leaf(project) => - Task.now(Promise[Array[Signature]]()).flatMap { cf => - val bundleInputs = BundleInputs(project, dag, Map.empty) - setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val jcf = Promise[Unit]() - val end = Promise[Option[CompileProducts]]() - val noSigs = new Array[Signature](0) - val noDefinedMacros = Map.empty[Project, Array[String]] - val oracle = new PipeliningOracle(bundle, noSigs, noDefinedMacros, cf, Nil) - val pipelineInputs = PipelineInputs(cf, end, jcf, JavaContinue, true) - val t = compile(Inputs(bundle, oracle, Some(pipelineInputs), Map.empty)) - val running = - Task.fromFuture(t.executeWithFork.runAsync(ExecutionContext.scheduler)) - val completeJava = Task - .deferFuture(end.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { - case Success(_) => JavaSignal.ContinueCompilation - case Failure(_) => JavaSignal.FailFastCompilation(bundle.project.name) - } - .memoize - - Task - .deferFuture(cf.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { upstream => - val ms = oracle.collectDefinedMacroSymbols - Leaf( - PartialCompileResult(bundle, upstream, end, jcf, completeJava, ms, running) - ) - } - } - } - - case Aggregate(dags) => - val downstream = dags.map(loop) - Task.gatherUnordered(downstream).flatMap { dagResults => - 
Task.now(Parent(PartialEmpty, dagResults)) - } - - case Parent(project, dependencies) => - val downstream = dependencies.map(loop) - Task.gatherUnordered(downstream).flatMap { dagResults => - val failed = dagResults.flatMap(dag => blockedBy(dag).toList) - if (failed.nonEmpty) { - // Register the name of the projects we're blocked on (intransitively) - val blockedResult = Compiler.Result.Blocked(failed.map(_.name)) - val blocked = Task.now(ResultBundle(blockedResult, None, None)) - Task.now(Parent(PartialFailure(project, BlockURI, blocked), dagResults)) - } else { - val results: List[PartialSuccess] = { - val transitive = dagResults.flatMap(Dag.dfs(_)).distinct - transitive.collect { case s: PartialSuccess => s } - } - - val failedPipelineProjects = new mutable.ListBuffer[Project]() - val pipelinedJavaSignals = new mutable.ListBuffer[Task[JavaSignal]]() - val transitiveSignatures = new ju.LinkedHashMap[String, Signature]() - val resultsToBlockOn = new mutable.ListBuffer[Task[(Project, ResultBundle)]]() - val pipelinedDependentProducts = - new mutable.ListBuffer[(Project, BundleProducts)]() - - results.foreach { ps => - val project = ps.bundle.project - ps.pipeliningResults match { - case None => resultsToBlockOn.+=(ps.result.map(r => project -> r)) - case Some(results) => - pipelinedJavaSignals.+=(results.shouldAttemptJavaCompilation) - val signatures = results.signatures - signatures.foreach { signature => - // Don't register if sig for name exists, signature lookup order is DFS - if (!transitiveSignatures.containsKey(signature.name())) - transitiveSignatures.put(signature.name(), signature) - } - - val products = results.productsWhenCompilationIsFinished - val result = products.future.value match { - case Some(Success(products)) => - products match { - case Some(products) => - // Add finished compile products when compilation is finished - pipelinedDependentProducts.+=(project -> Right(products)) - case None => () - } - case Some(Failure(t)) => - // Log if error 
when computing pipelining results and add to failure - ps.bundle.logger.trace(t) - failedPipelineProjects.+=(project) - case None => - val out = ps.bundle.out - val pipeliningResult = Left( - PartialCompileProducts( - out.internalReadOnlyClassesDir, - out.internalNewClassesDir, - results.definedMacros - ) - ) - pipelinedDependentProducts.+=(project -> pipeliningResult) - } - } - } - - if (failedPipelineProjects.nonEmpty) { - // If any project failed to pipeline, abort compilation with blocked result - val failed = failedPipelineProjects.toList - val blockedResult = Compiler.Result.Blocked(failed.map(_.name)) - val blocked = Task.now(ResultBundle(blockedResult, None, None)) - Task.now(Parent(PartialFailure(project, BlockURI, blocked), dagResults)) - } else { - // Get the compilation result of those projects which were not pipelined - Task.gatherUnordered(resultsToBlockOn.toList).flatMap { nonPipelineResults => - var nonPipelinedDependentProducts = - new mutable.ListBuffer[(Project, BundleProducts)]() - var nonPipelinedDependentResults = - new mutable.ListBuffer[(File, PreviousResult)]() - nonPipelineResults.foreach { - case (p, ResultBundle(s: Compiler.Result.Success, _, _, _)) => - val newProducts = s.products - nonPipelinedDependentProducts.+=(p -> Right(newProducts)) - val newResult = newProducts.resultForDependentCompilationsInSameRun - nonPipelinedDependentResults - .+=(newProducts.newClassesDir.toFile -> newResult) - .+=(newProducts.readOnlyClassesDir.toFile -> newResult) - case _ => () - } - - val projectResultsMap = - (pipelinedDependentProducts.iterator ++ nonPipelinedDependentProducts.iterator).toMap - val allMacros = projectResultsMap - .mapValues(_.fold(_.definedMacroSymbols, _.definedMacroSymbols)) - val allSignatures = { - import scala.collection.JavaConverters._ - // Order of signatures matters (e.g. 
simulates classpath lookup) - transitiveSignatures.values().iterator().asScala.toArray - } - - val bundleInputs = BundleInputs(project, dag, projectResultsMap) - setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - // Signals whether java compilation can proceed or not - val javaSignals = aggregateJavaSignals(pipelinedJavaSignals.toList) - Task.now(Promise[Array[Signature]]()).flatMap { cf => - val jf = Promise[Unit]() - val end = Promise[Option[CompileProducts]]() - val oracle = - new PipeliningOracle(bundle, allSignatures, allMacros, cf, results) - val pipelineInputs = PipelineInputs(cf, end, jf, javaSignals, true) - val t = compile( - Inputs( - bundle, - oracle, - Some(pipelineInputs), - // Pass incremental results for only those projects that were not pipelined - nonPipelinedDependentResults.toMap - ) - ) - - val running = t.executeWithFork.runAsync(ExecutionContext.scheduler) - val ongoing = Task.fromFuture(running) - val cj = { - Task - .deferFuture(end.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { - case Success(_) => JavaSignal.ContinueCompilation - case Failure(_) => JavaSignal.FailFastCompilation(project.name) - } - }.memoize // Important to memoize this task for performance reasons - - Task - .deferFuture(cf.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { upstream => - val ms = oracle.collectDefinedMacroSymbols - Parent( - PartialCompileResult(bundle, upstream, end, jf, cj, ms, ongoing), - dagResults - ) - } - } - } - } - } - } - } - } - - register(dag, task.memoize) - } - } - - loop(dag) - } - private def aggregateJavaSignals(xs: List[Task[JavaSignal]]): Task[JavaSignal] = { Task .gatherUnordered(xs) diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala index dc463c6d89..ef95a0550c 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala +++ 
b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala @@ -11,7 +11,6 @@ import monix.execution.CancelableFuture import scala.util.Try import scala.concurrent.Promise -import xsbti.compile.Signature sealed trait CompileResult[+R] { def result: R @@ -22,31 +21,6 @@ sealed trait PartialCompileResult extends CompileResult[Task[ResultBundle]] { } object PartialCompileResult { - def apply( - bundle: SuccessfulCompileBundle, - pipelineAttempt: Try[Array[Signature]], - futureProducts: Promise[Option[CompileProducts]], - hasJavacCompleted: Promise[Unit], - shouldCompileJava: Task[JavaSignal], - definedMacroSymbols: Array[String], - result: Task[ResultBundle] - ): PartialCompileResult = { - pipelineAttempt match { - case scala.util.Success(sigs) => - val pipeline = PipelineResults( - sigs, - definedMacroSymbols, - futureProducts, - hasJavacCompleted, - shouldCompileJava - ) - PartialSuccess(bundle, Some(pipeline), result) - case scala.util.Failure(CompileExceptions.CompletePromise) => - PartialSuccess(bundle, None, result) - case scala.util.Failure(t) => - PartialFailure(bundle.project, t, result) - } - } def mapEveryResult( results: Dag[PartialCompileResult] @@ -71,7 +45,7 @@ object PartialCompileResult { bundle.map(b => FinalNormalCompileResult(project, b) :: Nil) case PartialFailures(failures, _) => Task.gatherUnordered(failures.map(toFinalResult(_))).map(_.flatten) - case PartialSuccess(bundle, _, result) => + case PartialSuccess(bundle, result) => result.map(res => FinalNormalCompileResult(bundle.project, res) :: Nil) } } @@ -97,19 +71,10 @@ case class PartialFailures( case class PartialSuccess( bundle: SuccessfulCompileBundle, - pipeliningResults: Option[PipelineResults], result: Task[ResultBundle] ) extends PartialCompileResult with CacheHashCode -case class PipelineResults( - signatures: Array[Signature], - definedMacros: Array[String], - productsWhenCompilationIsFinished: Promise[Option[CompileProducts]], - isJavaCompilationFinished: 
Promise[Unit], - shouldAttemptJavaCompilation: Task[JavaSignal] -) - sealed trait FinalCompileResult extends CompileResult[ResultBundle] { def result: ResultBundle } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala deleted file mode 100644 index 44251aa037..0000000000 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala +++ /dev/null @@ -1,150 +0,0 @@ -package bloop.engine.tasks.compilation - -import java.io.File - -import bloop.data.Project -import bloop.{Compiler, CompilerOracle} -import bloop.engine.ExecutionContext -import bloop.io.AbsolutePath -import bloop.ScalaSig -import bloop.logging.Logger -import bloop.tracing.BraveTracer - -import scala.concurrent.Promise -import scala.collection.mutable - -import monix.eval.Task -import xsbti.compile.Signature -import monix.execution.atomic.AtomicBoolean -import scala.concurrent.Await -import scala.concurrent.duration.Duration -import monix.execution.misc.NonFatal - -/** @inheritdoc */ -final class PipeliningOracle( - bundle: CompileBundle, - signaturesFromRunningCompilations: Array[Signature], - definedMacrosFromRunningCompilations: Map[Project, Array[String]], - startDownstreamCompilation: Promise[Array[Signature]], - scheduledCompilations: List[PartialSuccess] -) extends CompilerOracle { - - /** @inheritdoc */ - override def askForJavaSourcesOfIncompleteCompilations: List[File] = { - scheduledCompilations.flatMap { r => - r.pipeliningResults match { - case None => Nil - case Some(results) => - if (results.isJavaCompilationFinished.isCompleted) Nil - else r.bundle.javaSources.map(_.toFile) - } - } - } - - private val definedMacros = new mutable.HashSet[String]() - - /** @inheritdoc */ - def registerDefinedMacro(definedMacroSymbol: String): Unit = definedMacros.+=(definedMacroSymbol) - - /** @inheritdoc */ - def collectDefinedMacroSymbols: Array[String] = 
definedMacros.toArray - - /** @inheritdoc */ - @volatile private var requiresMacroInitialization: Boolean = false - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit = { - if (requiresMacroInitialization) () - else { - val noMacrosDefinedInDependentProjects = { - definedMacrosFromRunningCompilations.isEmpty || - definedMacrosFromRunningCompilations.forall(_._2.isEmpty) - } - - if (noMacrosDefinedInDependentProjects) { - requiresMacroInitialization = true - } else { - // Only return promises for those projects that define any macros - val dependentProjectPromises = scheduledCompilations.flatMap { r => - r.pipeliningResults match { - case None => Nil - case Some(results) => - val hasNoMacros = { - val macros = definedMacrosFromRunningCompilations.get(r.bundle.project) - macros.isEmpty || macros.exists(_.isEmpty) - } - if (hasNoMacros) Nil - else List(Task.deferFuture(results.productsWhenCompilationIsFinished.future)) - } - } - - val waitDownstreamFullCompilations = { - Task - .sequence(dependentProjectPromises) - .map(_ => ()) - .runAsync(ExecutionContext.ioScheduler) - } - - /** - * Block until all the downstream compilations have completed. - * - * We have a guarantee from bloop that these promises will be always - * completed even if their associated compilations fail or are - * cancelled. In any of this scenario, and even if we throw on this - * wait, we catch it and let the compiler logic handle it. If the user - * has cancelled this compilation as well, the compiler logic will - * exit. If the compilation downstream failed, this compilation will - * fail too because supposedly it accesses macros defined downstream. - * Failing here it's fine. 
- */ - try Await.result(waitDownstreamFullCompilations, Duration.Inf) - catch { case NonFatal(e) => () } - finally { - requiresMacroInitialization = true - } - } - } - } - - /** @inheritdoc */ - def isPipeliningEnabled: Boolean = !startDownstreamCompilation.isCompleted - - /** @inheritdoc */ - def startDownstreamCompilations(signatures: Array[Signature]): Unit = { - startDownstreamCompilation.success(signatures) - } - - /** @inheritdoc */ - def collectDownstreamSignatures(): Array[Signature] = signaturesFromRunningCompilations -} - -object PipeliningOracle { - - /** - * Persists in-memory signatures to a pickles directory associated with the - * target that producted them. - * - * For the moment, this logic is unused in favor of an in-memory populating - * strategy via the analysis callback endpoint `downstreamSignatures`. - */ - def writeSignaturesToPicklesDir( - picklesDir: AbsolutePath, - signatures: List[Signature], - startDownstreamCompilation: Promise[Unit], - tracer: BraveTracer, - logger: Logger - ): Unit = { - val writePickles = signatures.map(ScalaSig.write(picklesDir, _, logger)) - val groupTasks = writePickles.grouped(4).map(group => Task.gatherUnordered(group)).toList - val persistPicklesInParallel = { - tracer.traceTask("writing pickles") { _ => - Task.sequence(groupTasks).doOnFinish { - case None => Task.now { startDownstreamCompilation.trySuccess(()); () } - case Some(t) => Task.now { startDownstreamCompilation.tryFailure(t); () } - } - } - } - - // Think strategies to get a hold of this future or cancel it if compilation is cancelled - persistPicklesInParallel.runAsync(ExecutionContext.ioScheduler) - () - } -} diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala deleted file mode 100644 index 789d0b6f21..0000000000 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala +++ /dev/null @@ -1,20 +0,0 @@ -package 
bloop.engine.tasks.compilation - -import bloop.CompilerOracle -import java.io.File -import bloop.ScalaSig -import bloop.io.AbsolutePath -import xsbti.compile.Signature -import scala.collection.mutable - -final class SimpleOracle extends CompilerOracle { - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit = () - def askForJavaSourcesOfIncompleteCompilations: List[File] = Nil - def isPipeliningEnabled: Boolean = false - def collectDownstreamSignatures: Array[Signature] = new Array[Signature](0) - def startDownstreamCompilations(sigs: Array[Signature]): Unit = () - - private val definedMacros = new mutable.HashSet[String]() - def registerDefinedMacro(definedMacroSymbol: String): Unit = definedMacros.+=(definedMacroSymbol) - def collectDefinedMacroSymbols: Array[String] = definedMacros.toArray -} diff --git a/frontend/src/main/scala/bloop/io/SourceHasher.scala b/frontend/src/main/scala/bloop/io/SourceHasher.scala index 0a6235a335..ac95d4897c 100644 --- a/frontend/src/main/scala/bloop/io/SourceHasher.scala +++ b/frontend/src/main/scala/bloop/io/SourceHasher.scala @@ -19,7 +19,6 @@ import scala.collection.mutable import scala.concurrent.Promise import bloop.data.Project -import bloop.CompilerOracle import bloop.engine.ExecutionContext import bloop.util.monix.FoldLeftAsyncConsumer import bloop.UniqueCompileInputs.HashedSource diff --git a/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala b/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala deleted file mode 100644 index f1b66306ce..0000000000 --- a/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala +++ /dev/null @@ -1,159 +0,0 @@ -package bloop - -import bloop.io.{AbsolutePath, RelativePath, Paths => BloopPaths} -import bloop.io.Environment.lineSeparator -import bloop.logging.RecordingLogger -import bloop.cli.{Commands, ExitStatus} -import bloop.engine.{Feedback, Run, State} -import bloop.util.{TestProject, TestUtil} - -import java.nio.file.Files -import java.util.concurrent.TimeUnit - 
-import scala.concurrent.Await -import scala.concurrent.duration.Duration -import scala.concurrent.duration.FiniteDuration -import bloop.engine.ExecutionContext - -object BuildPipeliningSpec extends bloop.testing.BaseSuite { - test("compile simple build") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |package a - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |package b - |class B extends a.A - """.stripMargin - val `C.scala` = - """/C.scala - |package c - |class C extends b.B - """.stripMargin - val `D.scala` = - """/D.scala - |package d - |class D extends c.C - """.stripMargin - val `E.scala` = - """/E.scala - |package e - |class E extends d.D - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val `A` = TestProject(workspace, "a", List(Sources.`A.scala`)) - val `B` = TestProject(workspace, "b", List(Sources.`B.scala`), List(`A`)) - val `C` = TestProject(workspace, "c", List(Sources.`C.scala`), List(`B`)) - val `D` = TestProject(workspace, "d", List(Sources.`D.scala`), List(`C`)) - val `E` = TestProject(workspace, "e", List(Sources.`E.scala`), List(`D`)) - val projects = List(`A`, `B`, `C`, `D`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`D`) - assert(compiledState.status == ExitStatus.Ok) - assertValidCompilationState(compiledState, projects) - } - } - - testOnlyOnJava8("compile simple build using Scala 2.10 (without pipelining)") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |class B extends A - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val jars = { - ScalaInstance - .resolve("org.scala-lang", "scala-compiler", "2.10.7", logger)( - ExecutionContext.ioScheduler - ) - .allJars - .map(AbsolutePath(_)) - } - - val scalaV = Some("2.10.7") - val `A` = 
TestProject( - workspace, - "a", - List(Sources.`A.scala`), - scalaVersion = scalaV, - jars = jars - ) - - val `B` = TestProject( - workspace, - "b", - List(Sources.`B.scala`), - List(`A`), - scalaVersion = scalaV, - jars = jars - ) - - val projects = List(`A`, `B`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`B`) - assert(compiledState.status == ExitStatus.Ok) - assertValidCompilationState(compiledState, projects) - - assertNoDiff( - logger.renderTimeInsensitiveInfos, - """|Compiling a (1 Scala source) - |Compiled a ??? - |Compiling b (1 Scala source) - |Compiled b ??? - |""".stripMargin - ) - - assertNoDiff( - logger.warnings.mkString(lineSeparator), - """|The project a didn't use pipelined compilation. - |The project b didn't use pipelined compilation. - |""".stripMargin - ) - } - } - - test("pipelining makes Java wait on upstream Scala compiles") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |class B extends A - """.stripMargin - val `C.java` = - """/C.java - |public class C extends B {} - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val `A` = TestProject(workspace, "a", List(Sources.`A.scala`, Sources.`B.scala`)) - // A project in the middle of the dependency graph with no sources - val `B` = TestProject(workspace, "b", Nil, List(`A`)) - val `C` = TestProject(workspace, "c", List(Sources.`C.java`), List(`B`)) - - val projects = List(`A`, `B`, `C`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`C`) - assert(compiledState.status == ExitStatus.Ok) - // Only check valid state in `A` and `C` because `B` is empty! 
- assertValidCompilationState(compiledState, List(`A`, `C`)) - } - } -} diff --git a/project/Dependencies.scala b/project/Dependencies.scala index d86c6b0bd5..605945d81c 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -13,7 +13,7 @@ object Dependencies { val nailgunCommit = "a2520c1e" // Keep in sync in BloopComponentCompiler - val zincVersion = "1.3.0-M4+47-d881fa2f" + val zincVersion = "1.3.5" val bspVersion = "2.0.0-M13" val javaDebugVersion = "0.21.0+1-7f1080f1" @@ -57,7 +57,7 @@ object Dependencies { val debugAdapterVersion = "2.0.12" import sbt.librarymanagement.syntax.stringToOrganization - val zinc = "ch.epfl.scala" %% "zinc" % zincVersion + val zinc = "org.scala-sbt" %% "zinc" % zincVersion val bsp4s = "ch.epfl.scala" %% "bsp4s" % bspVersion val bsp4j = "ch.epfl.scala" % "bsp4j" % bspVersion val nailgun = "ch.epfl.scala" % "nailgun-server" % nailgunVersion diff --git a/zinc b/zinc deleted file mode 160000 index d881fa2feb..0000000000 --- a/zinc +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d881fa2feb68b74c9c5afd1b8f62935a4e52b299