diff --git a/.gitmodules b/.gitmodules index d885e28346..2d8bece971 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,3 @@ -[submodule "zinc"] - path = zinc - url = https://github.com/scalacenter/zinc.git - branch = loop [submodule "nailgun"] path = nailgun url = https://github.com/scalacenter/nailgun.git diff --git a/backend/src/main/scala/bloop/BloopClassFileManager.scala b/backend/src/main/scala/bloop/BloopClassFileManager.scala index cf8afede1b..3b1d00264e 100644 --- a/backend/src/main/scala/bloop/BloopClassFileManager.scala +++ b/backend/src/main/scala/bloop/BloopClassFileManager.scala @@ -1,28 +1,29 @@ package bloop -import bloop.io.{Paths => BloopPaths} import bloop.io.AbsolutePath -import bloop.tracing.BraveTracer import bloop.io.ParallelOps import bloop.io.ParallelOps.CopyMode +import bloop.io.{Paths => BloopPaths} +import bloop.reporter.Reporter +import bloop.tracing.BraveTracer +import monix.eval.Task +import xsbti.compile.ClassFileManager +import xsbti.compile.PreviousResult import java.io.File +import java.io.IOException +import java.nio.file.CopyOption +import java.nio.file.Files import java.nio.file.Path import java.nio.file.Paths - +import java.nio.file.StandardCopyOption import scala.collection.mutable - -import xsbti.compile.ClassFileManager -import monix.eval.Task -import bloop.reporter.Reporter -import xsbti.compile.PreviousResult -import java.nio.file.Files -import java.io.IOException -import scala.util.Try import scala.util.Failure import scala.util.Success +import scala.util.Try final class BloopClassFileManager( + backupDir0: Path, inputs: CompileInputs, outPaths: CompileOutPaths, allGeneratedRelativeClassFilePaths: mutable.HashMap[String, File], @@ -38,9 +39,16 @@ final class BloopClassFileManager( private[this] val newClassesDirPath = newClassesDir.toString private[this] val dependentClassFilesLinks = new mutable.HashSet[Path]() private[this] val weakClassFileInvalidations = new mutable.HashSet[Path]() + private[this] 
val generatedFiles = new mutable.HashSet[File] // Supported compile products by the class file manager private[this] val supportedCompileProducts = List(".sjsir", ".nir", ".tasty") + // Files backed up during compilation + private[this] val movedFiles = new mutable.HashMap[File, File] + + private val backupDir = backupDir0.normalize + backupDir.toFile.delete() + Files.createDirectories(backupDir) /** * Returns the set of all invalidated class files. @@ -114,14 +122,14 @@ final class BloopClassFileManager( inputs.dependentResults ) match { case None => () - case Some(foundClassFile) => + case Some(foundClassFilePath) => weakClassFileInvalidations.+=(classFilePath) val newLink = newClassesDir.resolve(relativeFilePath) - BloopClassFileManager.link(newLink, foundClassFile.toPath) match { + BloopClassFileManager.link(newLink, foundClassFilePath) match { case Success(_) => dependentClassFilesLinks.+=(newLink) case Failure(exception) => inputs.logger.error( - s"Failed to create link for invalidated file $foundClassFile: ${exception.getMessage()}" + s"Failed to create link for invalidated file $foundClassFilePath: ${exception.getMessage()}" ) inputs.logger.trace(exception) } @@ -129,7 +137,6 @@ final class BloopClassFileManager( } } } - allInvalidatedClassFilesForProject.++=(classes) val invalidatedExtraCompileProducts = classes.flatMap { classFile => @@ -142,12 +149,28 @@ final class BloopClassFileManager( } } + // Idea taken from the default TransactionalClassFileManager in zinc + // https://github.com/sbt/zinc/blob/c18637c1b30f8ab7d1f702bb98301689ec75854b/internal/zinc-core/src/main/scala/sbt/internal/inc/ClassFileManager.scala#L183 + val toBeBackedUp = (classes ++ invalidatedExtraCompileProducts).filter(c => + !movedFiles.contains(c) && !generatedFiles(c) + ) + for { + c <- toBeBackedUp + if c.exists() + } movedFiles.put(c, move(c)).foreach(move) + + for { + f <- classes + if f.exists() + } f.delete() + 
allInvalidatedExtraCompileProducts.++=(invalidatedExtraCompileProducts) } def generated(generatedClassFiles: Array[File]): Unit = { memoizedInvalidatedClassFiles = null generatedClassFiles.foreach { generatedClassFile => + generatedFiles += generatedClassFile val newClassFile = generatedClassFile.getAbsolutePath val relativeClassFilePath = newClassFile.replace(newClassesDirPath, "") allGeneratedRelativeClassFilePaths.put(relativeClassFilePath, generatedClassFile) @@ -167,6 +190,7 @@ final class BloopClassFileManager( allInvalidatedExtraCompileProducts.-=(productAssociatedToClassFile) } } + } def complete(success: Boolean): Unit = { @@ -200,6 +224,21 @@ final class BloopClassFileManager( } ) } else { + /* Restore all files from backuped last successful compilation to make sure + * that they are still available. + */ + for { + (orig, tmp) <- movedFiles + if tmp.exists + } { + if (!orig.getParentFile.exists) { + Files.createDirectory(orig.getParentFile.toPath()) + } + Files.move(tmp.toPath(), orig.toPath()) + } + backupDir.toFile().delete() + () + // Delete all compilation products generated in the new classes directory val deleteNewDir = Task { BloopPaths.delete(AbsolutePath(newClassesDir)); () }.memoize backgroundTasksForFailedCompilation.+=( @@ -245,6 +284,12 @@ final class BloopClassFileManager( ) } } + + private def move(c: File): File = { + val target = Files.createTempFile(backupDir, "bloop", ".class").toFile + Files.move(c.toPath(), target.toPath(), StandardCopyOption.REPLACE_EXISTING) + target + } } object BloopClassFileManager { diff --git a/backend/src/main/scala/bloop/BloopClasspathEntryLookup.scala b/backend/src/main/scala/bloop/BloopClasspathEntryLookup.scala index 8b37ab2aac..b2197c16f2 100644 --- a/backend/src/main/scala/bloop/BloopClasspathEntryLookup.scala +++ b/backend/src/main/scala/bloop/BloopClasspathEntryLookup.scala @@ -1,45 +1,54 @@ package bloop -import java.io.File -import java.{util => ju} - -import sbt.util.InterfaceUtil +import 
bloop.util.AnalysisUtils +import sbt.internal.inc.PlainVirtualFileConverter +import sbt.internal.inc.bloop.internal.BloopNameHashing +import sbt.internal.inc.bloop.internal.BloopStamps +import sbt.internal.inc.classpath.ClasspathUtil import sbt.internal.inc.classpath.ClasspathUtilities - -import xsbti.compile.PreviousResult -import xsbti.compile.PerClasspathEntryLookup +import sbt.util.InterfaceUtil +import xsbti.FileConverter +import xsbti.VirtualFile import xsbti.compile.CompileAnalysis import xsbti.compile.DefinesClass -import java.util.zip.ZipFile -import java.util.zip.ZipException -import java.util.concurrent.ConcurrentHashMap import xsbti.compile.FileHash -import sbt.internal.inc.bloop.internal.BloopNameHashing -import sbt.internal.inc.bloop.internal.BloopStamps +import xsbti.compile.PerClasspathEntryLookup +import xsbti.compile.PreviousResult + +import java.io.File +import java.nio.file.Path +import java.util.concurrent.ConcurrentHashMap +import java.util.zip.ZipException +import java.util.zip.ZipFile +import java.{util => ju} final class BloopClasspathEntryLookup( results: Map[File, PreviousResult], - classpathHashes: Vector[FileHash] + classpathHashes: Vector[FileHash], + converter: FileConverter ) extends PerClasspathEntryLookup { - override def analysis(classpathEntry: File): ju.Optional[CompileAnalysis] = { - InterfaceUtil.toOptional(results.get(classpathEntry)).flatMap(_.analysis()) + override def analysis(classpathEntry: VirtualFile): ju.Optional[CompileAnalysis] = { + val file = converter.toPath(classpathEntry).toFile() + InterfaceUtil.toOptional(results.get(file)).flatMap(_.analysis()) } - override def definesClass(entry: File): DefinesClass = { - if (!entry.exists) FalseDefinesClass + override def definesClass(entry: VirtualFile): DefinesClass = { + val path = converter.toPath(entry) + val file = path.toFile() + if (!file.exists) FalseDefinesClass else { - classpathHashes.find(fh => fh.file() == entry) match { + classpathHashes.find(fh => fh.file() 
== file) match { case None => FalseDefinesClass case Some(entryHash) => def computeDefinesClassForJar = { - if (!ClasspathUtilities.isArchive(entry, contentFallback = true)) FalseDefinesClass - else new JarDefinesClass(entry) + if (!ClasspathUtil.isArchive(path, contentFallback = true)) FalseDefinesClass + else new JarDefinesClass(file) } - if (BloopStamps.isDirectoryHash(entryHash)) new DirectoryDefinesClass(entry) + if (BloopStamps.isDirectoryHash(entryHash)) new DirectoryDefinesClass(file) else { val (_, cachedDefinesClass) = BloopClasspathEntryLookup.definedClasses.compute( - entry, + file, (entry, definesClass) => { definesClass match { case null => @@ -116,14 +125,15 @@ object BloopClasspathEntryLookup { def definedClassFileInDependencies( relativeClassFile: String, results: Map[File, PreviousResult] - ): Option[File] = { - def findClassFile(t: (File, PreviousResult)): Option[File] = { + ): Option[Path] = { + def findClassFile(t: (File, PreviousResult)): Option[Path] = { val (classesDir, result) = t - val targetClassFile = new File(classesDir, relativeClassFile) + val targetFile = classesDir.toPath().resolve(relativeClassFile) + val targetClassFile = PlainVirtualFileConverter.converter.toVirtualFile(targetFile) InterfaceUtil.toOption(result.analysis()).flatMap { analysis0 => val analysis = analysis0.asInstanceOf[sbt.internal.inc.Analysis] val definedClass = analysis.relations.allProducts.contains(targetClassFile) - if (definedClass) Some(targetClassFile) else None + if (definedClass) Some(targetFile) else None } } diff --git a/backend/src/main/scala/bloop/CompileMode.scala b/backend/src/main/scala/bloop/CompileMode.scala deleted file mode 100644 index fceab7e07f..0000000000 --- a/backend/src/main/scala/bloop/CompileMode.scala +++ /dev/null @@ -1,27 +0,0 @@ -package bloop - -import _root_.monix.eval.Task -import scala.concurrent.Promise -import bloop.io.AbsolutePath -import xsbti.compile.Signature - -/** - * Defines the mode in which compilation should run. 
- */ -sealed trait CompileMode { - def oracle: CompilerOracle -} - -object CompileMode { - case class Sequential( - oracle: CompilerOracle - ) extends CompileMode - - final case class Pipelined( - completeJavaCompilation: Promise[Unit], - finishedCompilation: Promise[Option[CompileProducts]], - fireJavaCompilation: Task[JavaSignal], - oracle: CompilerOracle, - separateJavaAndScala: Boolean - ) extends CompileMode -} diff --git a/backend/src/main/scala/bloop/CompileProducts.scala b/backend/src/main/scala/bloop/CompileProducts.scala index 318394380a..3729a4522c 100644 --- a/backend/src/main/scala/bloop/CompileProducts.scala +++ b/backend/src/main/scala/bloop/CompileProducts.scala @@ -35,6 +35,5 @@ case class CompileProducts( resultForDependentCompilationsInSameRun: PreviousResult, resultForFutureCompilationRuns: PreviousResult, invalidatedCompileProducts: Set[File], - generatedRelativeClassFilePaths: Map[String, File], - definedMacroSymbols: Array[String] + generatedRelativeClassFilePaths: Map[String, File] ) diff --git a/backend/src/main/scala/bloop/Compiler.scala b/backend/src/main/scala/bloop/Compiler.scala index aeb2a5e4e0..a39d69beeb 100644 --- a/backend/src/main/scala/bloop/Compiler.scala +++ b/backend/src/main/scala/bloop/Compiler.scala @@ -13,8 +13,6 @@ import bloop.tracing.BraveTracer import bloop.logging.{ObservedLogger, Logger} import bloop.reporter.{ProblemPerPhase, ZincReporter} import bloop.util.{AnalysisUtils, UUIDUtil, CacheHashCode} -import bloop.CompileMode.Pipelined -import bloop.CompileMode.Sequential import xsbti.compile._ import xsbti.T2 @@ -23,7 +21,6 @@ import sbt.util.InterfaceUtil import sbt.internal.inc.Analysis import sbt.internal.inc.bloop.BloopZincCompiler import sbt.internal.inc.{FreshCompilerCache, InitialChanges, Locate} -import sbt.internal.inc.bloop.internal.StopPipelining import sbt.internal.inc.{ConcreteAnalysisContents, FileAnalysisStore} import scala.concurrent.Promise @@ -37,6 +34,9 @@ import 
sbt.internal.inc.bloop.internal.BloopStamps import sbt.internal.inc.bloop.internal.BloopLookup import bloop.reporter.Reporter import bloop.logging.CompilationEvent +import xsbti.VirtualFile +import xsbti.VirtualFileRef +import sbt.internal.inc.PlainVirtualFileConverter case class CompileInputs( scalaInstance: ScalaInstance, @@ -55,7 +55,6 @@ case class CompileInputs( previousCompilerResult: Compiler.Result, reporter: ZincReporter, logger: ObservedLogger[Logger], - mode: CompileMode, dependentResults: Map[File, PreviousResult], cancelPromise: Promise[Unit], tracer: BraveTracer, @@ -164,6 +163,7 @@ object CompileOutPaths { object Compiler { private implicit val filter = bloop.logging.DebugFilter.Compilation + private val converter = PlainVirtualFileConverter.converter private final class BloopProgress( reporter: ZincReporter, cancelPromise: Promise[Unit] @@ -172,7 +172,12 @@ object Compiler { reporter.reportNextPhase(phase, new java.io.File(unitPath)) } - override def advance(current: Int, total: Int): Boolean = { + override def advance( + current: Int, + total: Int, + prevPhase: String, + nextPhase: String + ): Boolean = { val isNotCancelled = !cancelPromise.isCompleted if (isNotCancelled) { reporter.reportCompilationProgress(current.toLong, total.toLong) @@ -260,6 +265,7 @@ object Compiler { def newFileManager: ClassFileManager = { new BloopClassFileManager( + Files.createTempDirectory("bloop"), compileInputs, compileOut, allGeneratedRelativeClassFilePaths, @@ -279,8 +285,9 @@ object Compiler { } def getCompilationOptions(inputs: CompileInputs): CompileOptions = { - val sources = inputs.sources // Sources are all files - val classpath = inputs.classpath.map(_.toFile) + // Sources are all files + val sources = inputs.sources.map(path => converter.toVirtualFile(path.underlying)) + val classpath = inputs.classpath.map(path => converter.toVirtualFile(path.underlying)) val optionsWithoutFatalWarnings = inputs.scalacOptions.flatMap { option => if (option != 
"-Xfatal-warnings") List(option) else { @@ -295,12 +302,11 @@ object Compiler { CompileOptions .create() - .withClassesDirectory(newClassesDir.toFile) - .withSources(sources.map(_.toFile)) + .withClassesDirectory(newClassesDir) + .withSources(sources) .withClasspath(classpath) .withScalacOptions(optionsWithoutFatalWarnings) .withJavacOptions(inputs.javacOptions) - .withClasspathOptions(inputs.classpathOptions) .withOrder(inputs.compileOrder) } @@ -312,7 +318,11 @@ object Compiler { newClassesDir.toFile -> compileInputs.previousResult ) - val lookup = new BloopClasspathEntryLookup(results, compileInputs.uniqueInputs.classpath) + val lookup = new BloopClasspathEntryLookup( + results, + compileInputs.uniqueInputs.classpath, + converter + ) val reporter = compileInputs.reporter val compilerCache = new FreshCompilerCache val cacheFile = compileInputs.baseDirectory.resolve("cache").toFile @@ -348,19 +358,12 @@ object Compiler { import ch.epfl.scala.bsp import scala.util.{Success, Failure} - val mode = compileInputs.mode val reporter = compileInputs.reporter def cancel(): Unit = { // Complete all pending promises when compilation is cancelled logger.debug(s"Cancelling compilation from ${readOnlyClassesDirPath} to ${newClassesDirPath}") compileInputs.cancelPromise.trySuccess(()) - mode match { - case _: Sequential => () - case Pipelined(completeJava, finishedCompilation, _, _, _) => - completeJava.trySuccess(()) - finishedCompilation.tryFailure(CompileExceptions.FailedOrCancelledPromise) - } // Always report the compilation of a project no matter if it's completed reporter.reportCancelledCompilation() @@ -383,7 +386,16 @@ object Compiler { val uniqueInputs = compileInputs.uniqueInputs reporter.reportStartCompilation(previousProblems) BloopZincCompiler - .compile(inputs, mode, reporter, logger, uniqueInputs, newFileManager, cancelPromise, tracer) + .compile( + inputs, + reporter, + logger, + uniqueInputs, + newFileManager, + cancelPromise, + tracer, + classpathOptions + ) 
.materialize .doOnCancel(Task(cancel())) .map { @@ -421,9 +433,9 @@ object Compiler { val invalidatedExtraProducts = allInvalidatedExtraCompileProducts.iterator.map(_.toPath).toSet val invalidatedInThisProject = invalidatedClassFiles ++ invalidatedExtraProducts - val blacklist = invalidatedInThisProject ++ readOnlyCopyBlacklist.iterator + val denyList = invalidatedInThisProject ++ readOnlyCopyBlacklist.iterator val config = - ParallelOps.CopyConfiguration(5, CopyMode.ReplaceIfMetadataMismatch, blacklist) + ParallelOps.CopyConfiguration(5, CopyMode.ReplaceIfMetadataMismatch, denyList) val lastCopy = ParallelOps.copyDirectories(config)( readOnlyClassesDir, clientClassesDir.underlying, @@ -448,7 +460,6 @@ object Compiler { } val isNoOp = previousAnalysis.contains(analysis) - val definedMacroSymbols = mode.oracle.collectDefinedMacroSymbols if (isNoOp) { // If no-op, return previous result with updated classpath hashes val noOpPreviousResult = { @@ -464,8 +475,7 @@ object Compiler { noOpPreviousResult, noOpPreviousResult, Set(), - Map.empty, - definedMacroSymbols + Map.empty ) val backgroundTasks = new CompileBackgroundTasks { @@ -524,7 +534,7 @@ object Compiler { val resultForFutureCompilationRuns = { resultForDependentCompilationsInSameRun.withAnalysis( - Optional.of(analysisForFutureCompilationRuns) + Optional.of(analysisForFutureCompilationRuns): Optional[CompileAnalysis] ) } @@ -582,8 +592,7 @@ object Compiler { resultForDependentCompilationsInSameRun, resultForFutureCompilationRuns, allInvalidated.toSet, - allGeneratedProducts, - definedMacroSymbols + allGeneratedProducts ) Result.Success( @@ -603,7 +612,6 @@ object Compiler { reporter.reportEndCompilation() cause match { - case f: StopPipelining => Result.Blocked(f.failedProjectNames) case f: xsbti.CompileFailed => // We cannot guarantee reporter.problems == f.problems, so we aggregate them together val reportedProblems = reporter.allProblemsPerPhase.toList @@ -728,12 +736,14 @@ object Compiler { ): Analysis = { 
// Cast to the only internal analysis that we support val analysis = analysis0.asInstanceOf[Analysis] - def rebase(file: File): File = { - val filePath = file.toPath.toAbsolutePath + def rebase(file: VirtualFileRef): VirtualFileRef = { + + val filePath = converter.toPath(file).toAbsolutePath() if (!filePath.startsWith(readOnlyClassesDir)) file else { // Hash for class file is the same because the copy duplicates metadata - newClassesDir.resolve(readOnlyClassesDir.relativize(filePath)).toFile + val path = newClassesDir.resolve(readOnlyClassesDir.relativize(filePath)) + converter.toVirtualFile(path) } } @@ -742,11 +752,12 @@ object Compiler { val oldStamps = analysis.stamps // Use empty stamps for files that have fatal warnings so that next compile recompiles them val rebasedSources = oldStamps.sources.map { - case t @ (file, _) => + case t @ (virtualFile, _) => + val file = converter.toPath(virtualFile).toFile() // Assumes file in reported diagnostic matches path in here val fileHasFatalWarnings = sourceFilesWithFatalWarnings.contains(file) if (!fileHasFatalWarnings) t - else file -> BloopStamps.emptyStampFor(file) + else virtualFile -> BloopStamps.emptyStamps } val rebasedProducts = oldStamps.products.map { case t @ (file, _) => @@ -754,7 +765,7 @@ object Compiler { if (rebased == file) t else rebased -> t._2 } // Changes the paths associated with the class file paths - Stamps(rebasedProducts, rebasedSources, oldStamps.binaries) + Stamps(rebasedProducts, rebasedSources, oldStamps.libraries) } val newRelations = { diff --git a/backend/src/main/scala/bloop/CompilerCache.scala b/backend/src/main/scala/bloop/CompilerCache.scala index 4a1a5444bc..186d9fd7ba 100644 --- a/backend/src/main/scala/bloop/CompilerCache.scala +++ b/backend/src/main/scala/bloop/CompilerCache.scala @@ -1,45 +1,48 @@ package bloop -import java.io.File -import java.lang.Iterable -import java.io.PrintWriter -import java.util.concurrent.ConcurrentHashMap - -import javax.tools.JavaFileManager.Location 
-import javax.tools.JavaFileObject.Kind -import javax.tools.{ - FileObject, - ForwardingJavaFileManager, - JavaFileManager, - JavaFileObject, - JavaCompiler => JavaxCompiler -} -import bloop.io.{AbsolutePath, Paths} -import bloop.util.JavaRuntime +import bloop.io.AbsolutePath +import bloop.io.Paths import bloop.logging.Logger +import bloop.util.JavaRuntime +import sbt.internal.inc.AnalyzingCompiler +import sbt.internal.inc.BloopComponentCompiler +import sbt.internal.inc.BloopZincLibraryManagement +import sbt.internal.inc.ZincUtil +import sbt.internal.inc.bloop.ZincInternals +import sbt.internal.inc.javac.DiagnosticsReporter +import sbt.internal.inc.javac.JavaTools +import sbt.internal.inc.javac.Javadoc +import sbt.internal.inc.javac.WriteReportingJavaFileObject +import sbt.internal.util.LoggerWriter import sbt.librarymanagement.Resolver import xsbti.ComponentProvider -import xsbti.compile.Compilers -import xsbti.compile.{JavaCompiler, JavaTool => XJavaTool} import xsbti.compile.ClassFileManager -import xsbti.{Logger => XLogger, Reporter => XReporter} -import sbt.internal.inc.bloop.ZincInternals -import sbt.internal.inc.{ - AnalyzingCompiler, - ZincUtil, - BloopZincLibraryManagement, - BloopComponentCompiler -} -import sbt.internal.inc.javac.{ - DiagnosticsReporter, - JavaTools, - Javadoc, - WriteReportingJavaFileObject -} -import sbt.internal.util.LoggerWriter +import xsbti.compile.Compilers +import xsbti.compile.JavaCompiler +import xsbti.compile.ScalaCompiler +import xsbti.compile.{JavaTool => XJavaTool} +import xsbti.{Logger => XLogger} +import xsbti.{Reporter => XReporter} + +import java.io.File import java.io.IOException +import java.io.PrintWriter +import java.lang.Iterable +import java.util.concurrent.ConcurrentHashMap +import javax.tools.FileObject +import javax.tools.ForwardingJavaFileManager +import javax.tools.JavaFileManager +import javax.tools.JavaFileManager.Location +import javax.tools.JavaFileObject +import javax.tools.JavaFileObject.Kind +import 
javax.tools.{JavaCompiler => JavaxCompiler} +import scala.collection.mutable.HashSet import scala.concurrent.ExecutionContext -import xsbti.compile.ScalaCompiler +import xsbti.VirtualFile +import bloop.util.AnalysisUtils +import xsbti.compile.{IncToolOptions, Output} +import sbt.internal.inc.CompilerArguments +import sbt.internal.inc.PlainVirtualFileConverter final class CompilerCache( componentProvider: ComponentProvider, @@ -157,9 +160,12 @@ final class CompilerCache( final class BloopForkedJavaCompiler(javaHome: Option[File]) extends JavaCompiler { import xsbti.compile.IncToolOptions + private val converter = PlainVirtualFileConverter.converter + def run( - sources: Array[File], + sources: Array[VirtualFile], options: Array[String], + output: Output, topts: IncToolOptions, reporter: XReporter, log: XLogger @@ -172,7 +178,7 @@ final class CompilerCache( import sbt.util.InterfaceUtil InterfaceUtil.toOption(topts.classFileManager()) match { case None => logger.error("Missing class file manager for forked Java compiler"); false - case Some(classFileManager) => + case Some(classFileManager: BloopClassFileManager) => import java.nio.file.Files val newInvalidatedEntry = AbsolutePath( Files.createTempDirectory("invalidated-forked-javac") @@ -207,10 +213,18 @@ final class CompilerCache( try { import sbt.internal.inc.javac.BloopForkedJavaUtils - BloopForkedJavaUtils.launch(javaHome, "javac", sources, options, log, reporter) + BloopForkedJavaUtils.launch( + javaHome, + "javac", + sources.map(converter.toPath(_)), + options, + log, + reporter + ) } finally { Paths.delete(newInvalidatedEntry) } + case _ => logger.error("Missing Bloop class file manager for forked Java compiler"); false } } } @@ -226,9 +240,11 @@ final class CompilerCache( import java.io.File import xsbti.compile.IncToolOptions import xsbti.Reporter + private val converter = PlainVirtualFileConverter.converter override def run( - sources: Array[File], + sources: Array[VirtualFile], options: Array[String], + 
output: Output, incToolOptions: IncToolOptions, reporter: Reporter, log0: xsbti.Logger @@ -252,8 +268,8 @@ final class CompilerCache( import sbt.internal.inc.javac.WriteReportingFileManager val zincFileManager = incToolOptions.classFileManager().get() val fileManager = new BloopInvalidatingFileManager(fileManager0, zincFileManager) - - val jfiles = fileManager0.getJavaFileObjectsFromFiles(sources.toList.asJava) + val sourceFiles: Array[File] = sources.map(converter.toPath(_).toFile()) + val jfiles = fileManager0.getJavaFileObjectsFromFiles(sourceFiles.toList.asJava) try { // Create directories of java args that trigger error if they don't exist def processJavaDirArgument(idx: Int): Unit = { @@ -275,7 +291,13 @@ final class CompilerCache( processJavaDirArgument(cleanedOptions.indexOf("-s")) processJavaDirArgument(cleanedOptions.indexOf("-h")) - val newJavacOptions = cleanedOptions.toList.asJava + output.getSingleOutputAsPath match { + case p if p.isPresent => java.nio.file.Files.createDirectories(p.get) + case _ => + } + + val outputOption = CompilerArguments.outputOption(output) + val newJavacOptions = (cleanedOptions.toList ++ outputOption).asJava log.debug(s"Invoking javac with ${newJavacOptions.asScala.mkString(" ")}") val success = compiler .getTask(logWriter, fileManager, diagnostics, newJavacOptions, null, jfiles) @@ -319,7 +341,11 @@ final class CompilerCache( val invalidated = { zincManager match { case m: bloop.BloopClassFileManager => m.invalidatedClassFilesSet - case _ => zincManager.invalidatedClassFiles().toSet + // Bloop uses it's own classfile manager so this should not happen + case _ => + logger.warn("Could not find BloopClassfileManager that is needed for invaldiation.") + new HashSet[File]() + } } diff --git a/backend/src/main/scala/bloop/CompilerOracle.scala b/backend/src/main/scala/bloop/CompilerOracle.scala deleted file mode 100644 index 2ef484ac64..0000000000 --- a/backend/src/main/scala/bloop/CompilerOracle.scala +++ /dev/null @@ -1,70 +0,0 
@@ -package bloop - -import java.io.File -import bloop.io.AbsolutePath -import xsbti.compile.Signature - -/** - * A compiler oracle is an entity that provides answers to questions that come - * up during the compilation of build targets. The oracle is an entity capable - * of synchronizing and answering questions critical for deduplicating and - * running compilations concurrently. - * - * For example, if a project wants to know something about the compilation of - * its dependencies, the oracle would be the right place to create a method - * that provides answers. - * - * The compiler oracle is created every time a project compilation is - * scheduled. Depending on the implementation, it can know both global - * information such as all the ongoing compilations happening in the build - * server, local data such as how a target is being compiled or both. - */ -abstract class CompilerOracle { - - /** - * Returns java sources of all those dependent projects whose compilations - * are not yet finished when build pipelining is enabled. If build pipelining - * is disabled, returns always an empty list since the class files of Java - * sources are already present in the compilation classpath. - */ - def askForJavaSourcesOfIncompleteCompilations: List[File] - - /** - * Registers a macro defined during this compilation run. It takes a full - * symbol name and associates it with the project under compilation. - */ - def registerDefinedMacro(definedMacroSymbol: String): Unit - - /** - * Collects all macro symbols that have been defined by this compilation. - */ - def collectDefinedMacroSymbols: Array[String] - - /** - * Blocks until the macro classpath for this macro is ready. If the macro has - * not been defined, we ignore it (it comes from a third-party library), - * otherwise we will wait until all dependent projects defining macros have - * finished compilation. 
- */ - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit - - /** - * Answers if build pipelining is enabled in the whole compilation run. - */ - def isPipeliningEnabled: Boolean - - /** - * Starts downstream compilations with the compile pickle data generated - * during the compilation of a project. This method needs to take care of - * making the pickles accessible to downstream compilations. - */ - def startDownstreamCompilations(signatures: Array[Signature]): Unit - - /** - * Collects all downstream signatures of transitive dependencies that have - * not yet finished compilation. Those dependencies that finished - * compilation don't need to provide their signatures because they will be - * loaded from the classes directory. - */ - def collectDownstreamSignatures(): Array[Signature] -} diff --git a/backend/src/main/scala/bloop/PartialCompileProducts.scala b/backend/src/main/scala/bloop/PartialCompileProducts.scala index 626fb9c5e1..4259e7a713 100644 --- a/backend/src/main/scala/bloop/PartialCompileProducts.scala +++ b/backend/src/main/scala/bloop/PartialCompileProducts.scala @@ -8,6 +8,5 @@ import bloop.io.AbsolutePath */ case class PartialCompileProducts( readOnlyClassesDir: AbsolutePath, - newClassesDir: AbsolutePath, - definedMacroSymbols: Array[String] + newClassesDir: AbsolutePath ) diff --git a/backend/src/main/scala/bloop/ScalaInstance.scala b/backend/src/main/scala/bloop/ScalaInstance.scala index f7a106566c..aeba80e643 100644 --- a/backend/src/main/scala/bloop/ScalaInstance.scala +++ b/backend/src/main/scala/bloop/ScalaInstance.scala @@ -20,17 +20,25 @@ final class ScalaInstance private ( override val version: String, override val allJars: Array[File] ) extends xsbti.compile.ScalaInstance { - override val compilerJar: File = { + + override lazy val loaderCompilerOnly: ClassLoader = + new URLClassLoader(compilerJars().map(_.toURI.toURL), ScalaInstance.topClassLoader) + + override def compilerJars(): Array[File] = { + val all = allJars + 
.filter(f => isJar(f.getName) && isCompilerJar(f)) + if (all.isEmpty) sys.error(s"Missing compiler jars in Scala jars ${allJars.mkString(", ")}") + all + } + + override def libraryJars(): Array[File] = { allJars - .find(f => isJar(f.getName) && hasScalaCompilerName(f.getName)) - .getOrElse( - sys.error(s"Missing compiler jar in Scala jars ${allJars.mkString(", ")}") - ) + .filter(f => isJar(f.getName) && hasScalaLibraryName(f.getName)) } - override val libraryJar: File = { + override val compilerJar: File = { allJars - .find(f => isJar(f.getName) && hasScalaLibraryName(f.getName)) + .find(f => isJar(f.getName) && hasScalaCompilerName(f.getName)) .getOrElse( sys.error(s"Missing compiler jar in Scala jars ${allJars.mkString(", ")}") ) @@ -47,10 +55,10 @@ final class ScalaInstance private ( (organization == "org.scala-lang" && version.startsWith("3.")) override lazy val loaderLibraryOnly: ClassLoader = - new URLClassLoader(Array(libraryJar.toURI.toURL), ScalaInstance.topClassLoader) + new URLClassLoader(libraryJars.map(_.toURI.toURL), ScalaInstance.topClassLoader) override lazy val loader: ClassLoader = { // For some exceptionally weird reason, we need to load all jars for dotty here - val jarsToLoad = if (isDotty) allJars else allJars.filterNot(_ == libraryJar) + val jarsToLoad = if (isDotty) allJars else allJars.filterNot(jar => libraryJars.contains(jar)) new URLClassLoader(jarsToLoad.map(_.toURI.toURL), loaderLibraryOnly) } @@ -60,7 +68,25 @@ final class ScalaInstance private ( filename.startsWith(ScalacCompilerName) || (isDotty && (filename.startsWith("dotty-compiler") || filename.startsWith("scala3-compiler"))) private def hasScalaLibraryName(filename: String): Boolean = - filename.startsWith("scala-library") + filename.startsWith("scala-library") || filename.startsWith("scala3-library") + + private def hasScalaReflectName(filename: String): Boolean = + filename.startsWith("scala-reflect") + + private def hasScalaXmlName(filename: String): Boolean = + 
filename.startsWith("scala-xml") + + private def hasScala3AdditionalLibraryName(filename: String): Boolean = + isDotty && + (filename.startsWith("scala3-interfaces") || filename.startsWith("tasty-core") || + filename.startsWith("scala-asm")) + + private def isCompilerJar(file: File) = { + val name = file.getName() + hasScalaReflectName(name) || hasScalaCompilerName(name) || + hasScalaLibraryName(name) || hasScalaXmlName(name) || + hasScala3AdditionalLibraryName(name) + } /** Tells us what the real version of the classloaded scalac compiler in this instance is. */ override def actualVersion(): String = { diff --git a/backend/src/main/scala/bloop/UniqueCompileInputs.scala b/backend/src/main/scala/bloop/UniqueCompileInputs.scala index e860d40954..9190d80c97 100644 --- a/backend/src/main/scala/bloop/UniqueCompileInputs.scala +++ b/backend/src/main/scala/bloop/UniqueCompileInputs.scala @@ -3,6 +3,7 @@ package bloop import bloop.io.AbsolutePath import bloop.util.CacheHashCode import xsbti.compile.FileHash +import xsbti.VirtualFileRef case class UniqueCompileInputs( sources: Vector[UniqueCompileInputs.HashedSource], @@ -36,7 +37,7 @@ case class UniqueCompileInputs( } object UniqueCompileInputs { - case class HashedSource(source: AbsolutePath, hash: Int) + case class HashedSource(source: VirtualFileRef, hash: Int) def emptyFor(originPath: String): UniqueCompileInputs = { UniqueCompileInputs(Vector.empty, Vector.empty, Vector.empty, Vector.empty, originPath) diff --git a/backend/src/main/scala/bloop/io/ClasspathHasher.scala b/backend/src/main/scala/bloop/io/ClasspathHasher.scala index 12dec74d9c..9e37d65555 100644 --- a/backend/src/main/scala/bloop/io/ClasspathHasher.scala +++ b/backend/src/main/scala/bloop/io/ClasspathHasher.scala @@ -27,13 +27,14 @@ import sbt.internal.inc.bloop.internal.BloopStamps import sbt.io.IO import java.util.concurrent.TimeUnit import java.io.PrintStream +import xsbti.VirtualFile object ClasspathHasher { // For more safety, store both the time and 
size private type JarMetadata = (FileTime, Long) - private[this] val hashingPromises = new ConcurrentHashMap[File, Promise[FileHash]]() - private[this] val cacheMetadataJar = new ConcurrentHashMap[File, (JarMetadata, FileHash)]() + private[this] val hashingPromises = new ConcurrentHashMap[Path, Promise[FileHash]]() + private[this] val cacheMetadataJar = new ConcurrentHashMap[Path, (JarMetadata, FileHash)]() /** * Hash the classpath in parallel with Monix's task. @@ -73,35 +74,35 @@ object ClasspathHasher { val timeoutSeconds: Long = 20L // We'll add the file hashes to the indices here and return it at the end val classpathHashes = new Array[FileHash](classpath.length) - case class AcquiredTask(file: File, idx: Int, p: Promise[FileHash]) + case class AcquiredTask(file: Path, idx: Int, p: Promise[FileHash]) val isCancelled = AtomicBoolean(false) val parallelConsumer = { Consumer.foreachParallelAsync[AcquiredTask](parallelUnits) { - case AcquiredTask(file, idx, p) => + case AcquiredTask(path, idx, p) => // Use task.now because Monix's load balancer already forces an async boundary val hashingTask = Task.now { val hash = try { if (cancelCompilation.isCompleted) { - BloopStamps.cancelledHash(file) + BloopStamps.cancelledHash(path) } else if (isCancelled.get) { cancelCompilation.trySuccess(()) - BloopStamps.cancelledHash(file) + BloopStamps.cancelledHash(path) } else { - val filePath = file.toPath - val attrs = Files.readAttributes(filePath, classOf[BasicFileAttributes]) - if (attrs.isDirectory) BloopStamps.directoryHash(file) + val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) + if (attrs.isDirectory) BloopStamps.directoryHash(path) else { val currentMetadata = - (FileTime.fromMillis(IO.getModifiedTimeOrZero(file)), attrs.size()) - Option(cacheMetadataJar.get(file)) match { + (FileTime.fromMillis(IO.getModifiedTimeOrZero(path.toFile)), attrs.size()) + Option(cacheMetadataJar.get(path)) match { case Some((metadata, hashHit)) if metadata == 
currentMetadata => hashHit case _ => - tracer.traceVerbose(s"computing hash ${filePath.toAbsolutePath.toString}") { + tracer.traceVerbose(s"computing hash ${path.toAbsolutePath.toString}") { _ => - val newHash = FileHash.of(file, ByteHasher.hashFileContents(file)) - cacheMetadataJar.put(file, (currentMetadata, newHash)) + val newHash = + FileHash.of(path, ByteHasher.hashFileContents(path.toFile)) + cacheMetadataJar.put(path, (currentMetadata, newHash)) newHash } } @@ -109,10 +110,10 @@ object ClasspathHasher { } } catch { // Can happen when a file doesn't exist, for example - case monix.execution.misc.NonFatal(t) => BloopStamps.emptyHash(file) + case monix.execution.misc.NonFatal(t) => BloopStamps.emptyHash(path) } classpathHashes(idx) = hash - hashingPromises.remove(file, p) + hashingPromises.remove(path, p) p.trySuccess(hash) () } @@ -131,12 +132,12 @@ object ClasspathHasher { TimeUnit.SECONDS, new Runnable { def run(): Unit = { - val hash = BloopStamps.cancelledHash(file) + val hash = BloopStamps.cancelledHash(path) // Complete if hashing for this entry hasn't finished in 15s, otherwise ignore - hashingPromises.remove(file, p) + hashingPromises.remove(path, p) if (p.trySuccess(hash)) { val msg = - s"Hashing ${file} is taking more than ${timeoutSeconds}s, detaching downstream clients to unblock them..." + s"Hashing ${path} is taking more than ${timeoutSeconds}s, detaching downstream clients to unblock them..." 
try { logger.warn(msg) serverOut.println(msg) @@ -157,7 +158,7 @@ object ClasspathHasher { val acquiredByOtherTasks = new mutable.ListBuffer[Task[Unit]]() val acquiredByThisHashingProcess = new mutable.ListBuffer[AcquiredTask]() - def acquireHashingEntry(entry: File, entryIdx: Int): Unit = { + def acquireHashingEntry(entry: Path, entryIdx: Int): Unit = { if (isCancelled.get) () else { val entryPromise = Promise[FileHash]() @@ -191,8 +192,7 @@ object ClasspathHasher { val initEntries = Task { classpath.zipWithIndex.foreach { case t @ (absoluteEntry, idx) => - val entry = absoluteEntry.toFile - acquireHashingEntry(entry, idx) + acquireHashingEntry(absoluteEntry.underlying, idx) } }.doOnCancel(Task { isCancelled.compareAndSet(false, true); () }) diff --git a/backend/src/main/scala/bloop/reporter/ObservedReporter.scala b/backend/src/main/scala/bloop/reporter/ObservedReporter.scala index 28dad4acf2..3237f3ec89 100644 --- a/backend/src/main/scala/bloop/reporter/ObservedReporter.scala +++ b/backend/src/main/scala/bloop/reporter/ObservedReporter.scala @@ -8,6 +8,7 @@ import ch.epfl.scala.bsp import bloop.logging.{ObservedLogger, Logger} import scala.concurrent.Promise import bloop.logging.CompilationEvent +import xsbti.VirtualFile final class ObservedReporter( val observedLogger: ObservedLogger[Logger], @@ -73,7 +74,10 @@ final class ObservedReporter( registerAction(ReporterAction.ProcessEndCompilation(code)) } - override def reportStartIncrementalCycle(sources: Seq[File], outputDirs: Seq[File]): Unit = { + override def reportStartIncrementalCycle( + sources: Seq[VirtualFile], + outputDirs: Seq[File] + ): Unit = { underlying.reportStartIncrementalCycle(sources, outputDirs) registerAction(ReporterAction.ReportStartIncrementalCycle(sources, outputDirs)) } diff --git a/backend/src/main/scala/bloop/reporter/Reporter.scala b/backend/src/main/scala/bloop/reporter/Reporter.scala index ad633842e9..1100aab355 100644 --- a/backend/src/main/scala/bloop/reporter/Reporter.scala +++ 
b/backend/src/main/scala/bloop/reporter/Reporter.scala @@ -15,6 +15,8 @@ import scala.util.Try import bloop.logging.CompilationEvent import scala.concurrent.Promise import monix.execution.atomic.AtomicInt +import xsbti.VirtualFile +import sbt.internal.inc.PlainVirtualFileConverter /** * A flexible reporter whose configuration is provided by a `ReporterConfig`. @@ -36,6 +38,7 @@ abstract class Reporter( override val config: ReporterConfig, val _problems: Reporter.Buffer[ProblemPerPhase] ) extends ZincReporter { + protected val converter = PlainVirtualFileConverter.converter private case class PositionId(sourcePath: String, offset: Int) private val _severities = TrieMap.empty[PositionId, Severity] private val _messages = TrieMap.empty[PositionId, List[String]] @@ -270,7 +273,7 @@ trait ZincReporter extends xsbti.Reporter with ConfigurableReporter { * inputs. This method is not called if the compilation is a no-op (e.g. same * analysis as before). */ - def reportStartIncrementalCycle(sources: Seq[File], outputDirs: Seq[File]): Unit + def reportStartIncrementalCycle(sources: Seq[VirtualFile], outputDirs: Seq[File]): Unit /** Report when the compiler enters in a phase. 
*/ def reportNextPhase(phase: String, sourceFile: File): Unit diff --git a/backend/src/main/scala/bloop/reporter/ReporterAction.scala b/backend/src/main/scala/bloop/reporter/ReporterAction.scala index c2ede91ef4..ae537364f2 100644 --- a/backend/src/main/scala/bloop/reporter/ReporterAction.scala +++ b/backend/src/main/scala/bloop/reporter/ReporterAction.scala @@ -3,6 +3,7 @@ package bloop.reporter import java.io.File import scala.util.Try import ch.epfl.scala.bsp +import xsbti.VirtualFile sealed trait ReporterAction object ReporterAction { @@ -10,7 +11,7 @@ object ReporterAction { final case object ReportStartCompilation extends ReporterAction final case class ReportStartIncrementalCycle( - sources: Seq[File], + sources: Seq[VirtualFile], outputDirs: Seq[File] ) extends ReporterAction diff --git a/backend/src/main/scala/bloop/scalasig/PickleMarker.scala b/backend/src/main/scala/bloop/scalasig/PickleMarker.scala deleted file mode 100644 index 3e7c66ef42..0000000000 --- a/backend/src/main/scala/bloop/scalasig/PickleMarker.scala +++ /dev/null @@ -1,60 +0,0 @@ -// Imported from twitter/rsc with minor modifications -// Copyright (c) 2017-2019 Twitter, Inc. -// Licensed under the Apache License, Version 2.0 (see LICENSE.md). 
-package bloop.scalasig - -import org.objectweb.asm.CustomAttribute - -final class PickleMarker extends CustomAttribute("ScalaSig", PickleMarker.bytes) - -object PickleMarker { - val bytes: Array[Byte] = { - val writer = new PickleWriter - writer.writeVarint(5) // Major pickle version - writer.writeVarint(0) // Minor pickle version - writer.writeVarint(0) - writer.toByteArray - } - - final class PickleWriter { - private var bytes = new Array[Byte](1024) - var offset = 0 - - def writeByte(x: Int): Unit = { - val requestedLen = offset + 1 - if (requestedLen > bytes.length) { - val bytes1 = new Array[Byte](requestedLen * 2) - Array.copy(bytes, 0, bytes1, 0, offset) - bytes = bytes1 - } - bytes(offset) = x.toByte - offset += 1 - } - - // NOTE: Write a 32-bit number as a base-128 varint. - // To learn more what a varint means, check out: - // https://developers.google.com/protocol-buffers/docs/encoding#varints - def writeVarint(x: Int): Unit = { - writeVarlong(x.toLong & 0x00000000ffffffffL) - } - - // NOTE: Write a 64-bit number as a base-128 varint. 
- // To learn more what a varint means, check out: - // https://developers.google.com/protocol-buffers/docs/encoding#varints - def writeVarlong(x: Long): Unit = { - def writePrefix(x: Long): Unit = { - val y = x >>> 7 - if (y != 0L) writePrefix(y) - writeByte(((x & 0x7f) | 0x80).toInt) - } - val y = x >>> 7 - if (y != 0L) writePrefix(y) - writeByte((x & 0x7f).toInt) - } - - def toByteArray: Array[Byte] = { - import java.util.Arrays - Arrays.copyOfRange(bytes, 0, offset) - } - } -} diff --git a/backend/src/main/scala/bloop/scalasig/ScalaSig.scala b/backend/src/main/scala/bloop/scalasig/ScalaSig.scala deleted file mode 100644 index 36cbe27394..0000000000 --- a/backend/src/main/scala/bloop/scalasig/ScalaSig.scala +++ /dev/null @@ -1,71 +0,0 @@ -package bloop - -import bloop.scalasig.ScalaSigWriter -import bloop.io.AbsolutePath -import bloop.scalasig.PickleMarker - -import monix.eval.Task -import java.nio.file.Files -import java.nio.charset.StandardCharsets -import bloop.logging.Logger -import bloop.logging.DebugFilter -import xsbti.compile.Signature - -object ScalaSig { - def write(picklesDir: AbsolutePath, sig: Signature, logger: Logger): Task[Unit] = { - Task { - val targetPicklePath = picklesDir.resolve(sig.name) - val targetPickleParentPath = targetPicklePath.getParent - val rawClassFileName = targetPicklePath.underlying.getFileName().toString - val dummyClassPath = targetPickleParentPath.resolve(s"${rawClassFileName}.class") - val classFileName = { - if (rawClassFileName.endsWith("package") || rawClassFileName.endsWith("package$")) { - s"${targetPickleParentPath.toString}.$rawClassFileName" - } else { - rawClassFileName - } - } - - val bytes = toBinary(rawClassFileName, sig) - logger.debug(s"Writing pickle to $dummyClassPath")(DebugFilter.Compilation) - Files.write(dummyClassPath.underlying, bytes) - () - } - } - - def toBinary(className: String, sig: Signature): Array[Byte] = { - import org.objectweb.asm._ - import org.objectweb.asm.Opcodes._ - import 
org.objectweb.asm.tree._ - val classWriter = new ClassWriter(0) - classWriter.visit( - V1_8, - ACC_PUBLIC + ACC_SUPER, - className, - null, - "java/lang/Object", - null - ) - /*if (classfile.source.nonEmpty) { - classWriter.visitSource(classfile.source, null) - }*/ - val packedScalasig = ScalaSigWriter.packScalasig(sig.content) - packedScalasig match { - case Array(packedScalasig) => - val desc = "Lscala/reflect/ScalaSignature;" - val av = classWriter.visitAnnotation(desc, true) - av.visit("bytes", packedScalasig) - av.visitEnd() - case packedScalasigChunks => - val desc = "Lscala/reflect/ScalaLongSignature;" - val av = classWriter.visitAnnotation(desc, true) - val aav = av.visitArray("bytes") - packedScalasigChunks.foreach(aav.visit("bytes", _)) - aav.visitEnd() - av.visitEnd() - } - classWriter.visitAttribute(new PickleMarker) - classWriter.visitEnd() - classWriter.toByteArray - } -} diff --git a/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala b/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala deleted file mode 100644 index aaf5984a5e..0000000000 --- a/backend/src/main/scala/bloop/scalasig/ScalaSigWriter.scala +++ /dev/null @@ -1,144 +0,0 @@ -// Imported from twitter/rsc with minor modifications -// Copyright (c) 2017-2019 Twitter, Inc. -// Licensed under the Apache License, Version 2.0 (see LICENSE.md). -// NOTE: This file has been partially copy/pasted from scala/scala. -package bloop.scalasig - -// NOTE: While ClassfileReader is documented, ClassfileWriter is not. -// The implementation in Scalac seems to be inconsistent with the official -// documentation at: https://docs.scala-lang.org/sips/picked-signatures.html. 
-// * https://github.com/scala/scala/blob/v2.12.6/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala -// * https://github.com/scala/scala/blob/v2.12.6/src/reflect/scala/reflect/internal/AnnotationInfos.scala -// * https://github.com/scala/scala/blob/v2.12.6/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala -object ScalaSigWriter { - def packScalasig(unpackedScalasig: Array[Byte]): Array[String] = { - val ubytes = mapToNextModSevenBits(encode8to7(unpackedScalasig)) - if (needsScalaLongSignature(ubytes)) ubytesToArray(ubytes) - else Array(ubytesToString(ubytes)) - } - - private def encode8to7(src: Array[Byte]): Array[Byte] = { - val srclen = src.length - val dstlen = (srclen * 8 + 6) / 7 - val dst = new Array[Byte](dstlen) - var i = 0 - var j = 0 - while (i + 6 < srclen) { - var in: Int = src(i) & 0xff - dst(j) = (in & 0x7f).toByte - var out: Int = in >>> 7 - in = src(i + 1) & 0xff - dst(j + 1) = (out | (in << 1) & 0x7f).toByte - out = in >>> 6 - in = src(i + 2) & 0xff - dst(j + 2) = (out | (in << 2) & 0x7f).toByte - out = in >>> 5 - in = src(i + 3) & 0xff - dst(j + 3) = (out | (in << 3) & 0x7f).toByte - out = in >>> 4 - in = src(i + 4) & 0xff - dst(j + 4) = (out | (in << 4) & 0x7f).toByte - out = in >>> 3 - in = src(i + 5) & 0xff - dst(j + 5) = (out | (in << 5) & 0x7f).toByte - out = in >>> 2 - in = src(i + 6) & 0xff - dst(j + 6) = (out | (in << 6) & 0x7f).toByte - out = in >>> 1 - dst(j + 7) = out.toByte - i += 7 - j += 8 - } - if (i < srclen) { - var in: Int = src(i) & 0xff - dst(j) = (in & 0x7f).toByte; j += 1 - var out: Int = in >>> 7 - if (i + 1 < srclen) { - in = src(i + 1) & 0xff - dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1 - out = in >>> 6 - if (i + 2 < srclen) { - in = src(i + 2) & 0xff - dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1 - out = in >>> 5 - if (i + 3 < srclen) { - in = src(i + 3) & 0xff - dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1 - out = in >>> 4 - if (i + 4 < srclen) { - in = src(i + 
4) & 0xff - dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1 - out = in >>> 3 - if (i + 5 < srclen) { - in = src(i + 5) & 0xff - dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1 - out = in >>> 2 - } - } - } - } - } - if (j < dstlen) dst(j) = out.toByte - } - dst - } - - private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = { - var i = 0 - val srclen = src.length - while (i < srclen) { - val in = src(i) - src(i) = (if (in == 0x7f) 0.toByte else (in + 1).toByte) - i += 1 - } - src - } - - private def needsScalaLongSignature(src: Array[Byte]): Boolean = { - var i = 0 - var numZeros = 0 - while (i < src.length) { - if (src(i) == 0) numZeros += 1 - i += 1 - } - (src.length + numZeros) >= 65536 - } - - private def ubytesToString(ubytes: Array[Byte]): String = { - val chars = new Array[Char](ubytes.length) - var i = 0 - while (i < ubytes.length) { - val b: Byte = ubytes(i) - assert((b & ~0x7f) == 0) - chars(i) = b.asInstanceOf[Char] - i += 1 - } - new String(chars) - } - - private def ubytesToArray(ubytes: Array[Byte]): Array[String] = { - var strs: List[String] = Nil - var prevOffset = 0 - var offset = 0 - var encLength = 0 - while (offset < ubytes.length) { - val deltaEncLength = if (ubytes(offset) == 0) 2 else 1 - val newEncLength = encLength.toLong + deltaEncLength - if (newEncLength >= 65535) { - val ba = ubytes.slice(prevOffset, offset) - strs ::= ubytesToString(ba) - encLength = 0 - prevOffset = offset - } else { - encLength += deltaEncLength - offset += 1 - } - } - if (prevOffset < offset) { - assert(offset == ubytes.length) - val ba = ubytes.slice(prevOffset, offset) - strs ::= ubytesToString(ba) - } - strs.reverse.toArray - } -} diff --git a/backend/src/main/scala/bloop/util/AnalysisUtils.scala b/backend/src/main/scala/bloop/util/AnalysisUtils.scala index 5d66e1e02d..698337eab6 100644 --- a/backend/src/main/scala/bloop/util/AnalysisUtils.scala +++ b/backend/src/main/scala/bloop/util/AnalysisUtils.scala @@ -1,14 +1,15 @@ package bloop.util -import 
java.io.File - import bloop.reporter.ProblemPerPhase +import xsbti.VirtualFileRef import xsbti.compile.CompileAnalysis import xsbti.compile.analysis.SourceInfo +import java.io.File + object AnalysisUtils { import scala.collection.JavaConverters._ - def sourceInfosFrom(previousAnalysis: CompileAnalysis): Map[File, SourceInfo] = { + def sourceInfosFrom(previousAnalysis: CompileAnalysis): Map[VirtualFileRef, SourceInfo] = { previousAnalysis.readSourceInfos().getAllSourceInfos.asScala.toMap } diff --git a/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala b/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala index 363a5c9d1d..996675ca26 100644 --- a/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/BloopComponentCompiler.scala @@ -31,6 +31,8 @@ import _root_.bloop.logging.{Logger => BloopLogger} import _root_.bloop.{DependencyResolution => BloopDependencyResolution} import _root_.bloop.logging.DebugFilter import scala.concurrent.ExecutionContext +import java.nio.file.Path +import sbt.internal.inc.classpath.ClasspathUtil object BloopComponentCompiler { import xsbti.compile.ScalaInstance @@ -61,10 +63,9 @@ object BloopComponentCompiler { } val (isDotty, organization, version) = scalaInstance match { - case instance: BloopScalaInstance => - if (instance.isDotty) (true, instance.organization, instance.version) - else (false, "ch.epfl.scala", latestVersion) - case instance: ScalaInstance => (false, "ch.epfl.scala", latestVersion) + case instance: BloopScalaInstance if instance.isDotty => + (true, instance.organization, instance.version) + case _ => (false, "org.scala-sbt", latestVersion) } val bridgeId = compilerBridgeId(scalaInstance.version) @@ -102,24 +103,14 @@ object BloopComponentCompiler { scheduler: ExecutionContext ) extends CompilerBridgeProvider { - private def is213ThatNeedsPreviousZinc(scalaVersion: String): Boolean = { - scalaVersion.startsWith("2.13.0") || - 
scalaVersion.startsWith("2.13.1") || - scalaVersion.startsWith("2.13.2") - } - /** * Defines a richer interface for Scala users that want to pass in an explicit module id. * * Note that this method cannot be defined in [[CompilerBridgeProvider]] because [[ModuleID]] * is a Scala-defined class to which the compiler bridge cannot depend on. */ - private def compiledBridge(bridgeSources0: ModuleID, scalaInstance: ScalaInstance): File = { + private def compiledBridge(bridgeSources: ModuleID, scalaInstance: ScalaInstance): File = { val scalaVersion = scalaInstance.version() - val bridgeSources = - if (is213ThatNeedsPreviousZinc(scalaVersion)) - bridgeSources0.withRevision("1.3.0-M4+42-5daa8ed7") - else bridgeSources0 val raw = new RawCompiler(scalaInstance, ClasspathOptionsUtil.auto, logger) val zinc = new BloopComponentCompiler(raw, manager, bridgeSources, logger, scheduler) logger.debug(s"Getting $bridgeSources for Scala ${scalaInstance.version}")( @@ -169,18 +160,19 @@ object BloopComponentCompiler { val scalaLibrary = scalaArtifacts.library val jarsToLoad = (scalaCompiler +: scalaLibrary +: scalaArtifacts.others).toArray assert(jarsToLoad.forall(_.exists), "One or more jar(s) in the Scala instance do not exist.") - val loaderLibraryOnly = ClasspathUtilities.toLoader(Vector(scalaLibrary)) + val loaderLibraryOnly = ClasspathUtil.toLoader(Vector(scalaLibrary.toPath())) val jarsToLoad2 = jarsToLoad.toVector.filterNot(_ == scalaLibrary) - val loader = ClasspathUtilities.toLoader(jarsToLoad2, loaderLibraryOnly) + val loader = ClasspathUtil.toLoader(jarsToLoad2.map(_.toPath()), loaderLibraryOnly) val properties = ResourceLoader.getSafePropertiesFor("compiler.properties", loader) val loaderVersion = Option(properties.getProperty("version.number")) val scalaV = loaderVersion.getOrElse("unknown") new inc.ScalaInstance( scalaV, loader, + loader, loaderLibraryOnly, - scalaLibrary, - scalaCompiler, + Array(scalaLibrary), + jarsToLoad, jarsToLoad, loaderVersion ) @@ -274,7 +266,7 
@@ private[inc] class BloopComponentCompiler( logger, resolveSources = shouldResolveSources )(scheduler) match { - case Right(paths) => paths.map(_.toFile).toVector + case Right(paths) => paths.map(_.underlying).toVector case Left(t) => val msg = s"Couldn't retrieve module $bridgeSources" throw new InvalidComponent(msg, t) @@ -282,9 +274,10 @@ private[inc] class BloopComponentCompiler( if (!shouldResolveSources) { // This is usually true in the Dotty case, that has a pre-compiled compiler - manager.define(compilerBridgeId, allArtifacts) + manager.define(compilerBridgeId, allArtifacts.map(_.toFile())) } else { - val (sources, xsbtiJars) = allArtifacts.partition(_.getName.endsWith("-sources.jar")) + val (sources, xsbtiJars) = + allArtifacts.partition(_.toFile.getName.endsWith("-sources.jar")) val (toCompileID, allSources) = { val instance = compiler.scalaInstance if (!HydraSupport.isEnabled(compiler.scalaInstance)) (bridgeSources.name, sources) @@ -306,7 +299,7 @@ private[inc] class BloopComponentCompiler( AnalyzingCompiler.compileSources( allSources, - target, + target.toPath(), xsbtiJars, toCompileID, compiler, @@ -321,9 +314,9 @@ private[inc] class BloopComponentCompiler( import xsbti.compile.ScalaInstance private def mergeBloopAndHydraBridges( - bloopBridgeSourceJars: Vector[File], + bloopBridgeSourceJars: Vector[Path], hydraBridgeModule: ModuleID - ): Either[InvalidComponent, Vector[File]] = { + ): Either[InvalidComponent, Vector[Path]] = { val hydraSourcesJars = BloopDependencyResolution.resolveWithErrors( List( BloopDependencyResolution @@ -337,7 +330,7 @@ private[inc] class BloopComponentCompiler( resolveSources = true, additionalRepositories = List(HydraSupport.resolver) )(scheduler) match { - case Right(paths) => Right(paths.map(_.toFile).toVector) + case Right(paths) => Right(paths.map(_.underlying).toVector) case Left(t) => val msg = s"Couldn't retrieve module $hydraBridgeModule" Left(new InvalidComponent(msg, t)) @@ -360,7 +353,7 @@ private[inc] class 
BloopComponentCompiler( } withTemporaryDirectory { tempDir => - val hydraSourceContents = unzip(sourceJar, tempDir) + val hydraSourceContents = unzip(sourceJar.toFile, tempDir) logger.debug(s"Sources from hydra bridge: $hydraSourceContents") // Unfortunately we can only use names to filter out, let's hope there's no clashes @@ -376,17 +369,17 @@ private[inc] class BloopComponentCompiler( // Extract bridge source contens in same folder with Hydra contents having preference val regularSourceContents = bloopBridgeSourceJars.foldLeft(Set.empty[File]) { case (extracted, sourceJar) => - extracted ++ unzip(sourceJar, tempDir, filter = filterOutConflicts) + extracted ++ unzip(sourceJar.toFile(), tempDir, filter = filterOutConflicts) } logger.debug(s"Sources from bloop bridge: $regularSourceContents") - val mergedJar = Files.createTempFile(HydraSupport.bridgeNamePrefix, "merged").toFile + val mergedJar = Files.createTempFile(HydraSupport.bridgeNamePrefix, "merged") logger.debug(s"Merged jar destination: $mergedJar") val allSourceContents = (hydraSourceContents ++ regularSourceContents).map(s => s -> relativize(tempDir, s).get) - zip(allSourceContents.toSeq, mergedJar) + zip(allSourceContents.toSeq, mergedJar.toFile(), time = None) Right(Vector(mergedJar)) } diff --git a/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala b/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala index 4d53abd51a..34c10ddd38 100644 --- a/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala +++ b/backend/src/main/scala/sbt/internal/inc/BloopZincLibraryManagement.scala @@ -42,6 +42,12 @@ object BloopZincLibraryManagement { scheduler ) val loader = Some(new ClassLoaderCache(new URLClassLoader(new Array(0)))) - new AnalyzingCompiler(scalaInstance, compilerBridgeProvider, _ => (), loader) + new AnalyzingCompiler( + scalaInstance, + compilerBridgeProvider, + ClasspathOptionsUtil.boot(), + _ => (), + loader + ) } } diff --git 
a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala index 8244f92469..9d93bf92c1 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/BloopZincCompiler.scala @@ -4,7 +4,6 @@ package sbt.internal.inc.bloop import java.io.File import java.util.concurrent.CompletableFuture -import bloop.{CompileMode, CompilerOracle} import bloop.reporter.ZincReporter import bloop.logging.ObservedLogger import bloop.tracing.BraveTracer @@ -19,6 +18,9 @@ import xsbti.compile._ import bloop.UniqueCompileInputs import scala.concurrent.Promise +import sbt.internal.inc.PlainVirtualFileConverter +import bloop.util.AnalysisUtils +import xsbti.VirtualFile object BloopZincCompiler { import bloop.logging.DebugFilter @@ -44,13 +46,13 @@ object BloopZincCompiler { */ def compile( in: Inputs, - compileMode: CompileMode, reporter: ZincReporter, logger: ObservedLogger[_], uniqueInputs: UniqueCompileInputs, manager: ClassFileManager, cancelPromise: Promise[Unit], - tracer: BraveTracer + tracer: BraveTracer, + classpathOptions: ClasspathOptions ): Task[CompileResult] = { val config = in.options() val setup = in.setup() @@ -81,7 +83,6 @@ object BloopZincCompiler { skip, incrementalCompilerOptions, extraOptions, - compileMode, manager, cancelPromise, tracer @@ -92,8 +93,8 @@ object BloopZincCompiler { def compileIncrementally( scalaCompiler: xsbti.compile.ScalaCompiler, javaCompiler: xsbti.compile.JavaCompiler, - sources: Array[File], - classpath: Seq[File], + sources: Array[VirtualFile], + classpath: Seq[VirtualFile], uniqueInputs: UniqueCompileInputs, output: Output, cache: GlobalsCache, @@ -109,7 +110,6 @@ object BloopZincCompiler { skip: Boolean = false, incrementalOptions: IncOptions, extra: List[(String, String)], - compileMode: CompileMode, manager: ClassFileManager, cancelPromise: Promise[Unit], tracer: BraveTracer @@ -126,12 
+126,12 @@ object BloopZincCompiler { if (skip) Task.now(CompileResult.of(prev, config.currentSetup, false)) else { val setOfSources = sources.toSet - val compiler = BloopHighLevelCompiler(config, reporter, logger, tracer) + val compiler = BloopHighLevelCompiler(config, reporter, logger, tracer, classpathOptions) val lookup = new BloopLookup(config, previousSetup, logger) val analysis = invalidateAnalysisFromSetup(config.currentSetup, previousSetup, incrementalOptions.ignoredScalacOptions(), setOfSources, prev, manager, logger) // Scala needs the explicit type signature to infer the function type arguments - val compile: (Set[File], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] = compiler.compile(_, _, _, _, compileMode, cancelPromise) + val compile: (Set[VirtualFile], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] = compiler.compile(_, _, _, _, cancelPromise, classpathOptions) BloopIncremental .compile( setOfSources, @@ -143,7 +143,6 @@ object BloopZincCompiler { logger, reporter, config.incOptions, - compileMode, manager, tracer, HydraSupport.isEnabled(config.compiler.scalaInstance()) @@ -173,15 +172,18 @@ object BloopZincCompiler { setup: MiniSetup, previousSetup: Option[MiniSetup], ignoredScalacOptions: Array[String], - sources: Set[File], + sources: Set[VirtualFile], previousAnalysis: CompileAnalysis, manager: ClassFileManager, logger: ObservedLogger[_] ): CompileAnalysis = { // Copied from `Incremental` to pass in the class file manager we want - def prune(invalidatedSrcs: Set[File], previous0: CompileAnalysis, classfileManager: ClassFileManager): Analysis = { + def prune(invalidatedSrcs: Set[VirtualFile], previous0: CompileAnalysis, classfileManager: ClassFileManager): Analysis = { val previous = previous0 match { case a: Analysis => a } - classfileManager.delete(invalidatedSrcs.flatMap(previous.relations.products).toArray) + val toDelete = invalidatedSrcs.flatMap(previous.relations.products).toArray.collect { + 
case vf: VirtualFile => vf + } + classfileManager.delete(toDelete) previous -- invalidatedSrcs } @@ -226,8 +228,8 @@ object BloopZincCompiler { def configureAnalyzingCompiler( scalac: xsbti.compile.ScalaCompiler, javac: xsbti.compile.JavaCompiler, - sources: Seq[File], - classpath: Seq[File], + sources: Seq[VirtualFile], + classpath: Seq[VirtualFile], classpathHashes: Seq[FileHash], output: Output, cache: GlobalsCache, @@ -263,8 +265,8 @@ object BloopZincCompiler { val outputJar = JarUtils.createOutputJarContent(output) MixedAnalyzingCompiler.config( sources, + PlainVirtualFileConverter.converter, classpath, - classpathOptions, compileSetup, progress, previousAnalysis, @@ -276,7 +278,12 @@ object BloopZincCompiler { skip, cache, incrementalCompilerOptions, - outputJar + outputJar, + // deals with pipelining, not supported yet + earlyOutput = None, + // deals with pipelining, not supported yet + earlyAnalysisStore = None, + stamper = BloopStamps.initial ) } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/ZincInternals.scala b/backend/src/main/scala/sbt/internal/inc/bloop/ZincInternals.scala index 204674da5a..64be4c221e 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/ZincInternals.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/ZincInternals.scala @@ -11,6 +11,9 @@ import sbt.internal.inc.javac.AnalyzingJavaCompiler import sbt.librarymanagement.{Configurations, ModuleID} import xsbti.compile.{ClasspathOptions, JavaCompiler} import xsbti.{ComponentProvider, Position} +import xsbti.VirtualFile +import bloop.util.AnalysisUtils +import xsbti.VirtualFileRef object ZincInternals { import sbt.internal.inc.JavaInterfaceUtil.EnrichOptional @@ -42,11 +45,11 @@ object ZincInternals { def instantiateJavaCompiler( javac: xsbti.compile.JavaCompiler, - classpath: Seq[File], + classpath: Seq[VirtualFile], instance: xsbti.compile.ScalaInstance, cpOptions: ClasspathOptions, - lookup: (String => Option[File]), - searchClasspath: Seq[File] + lookup: 
(String => Option[VirtualFile]), + searchClasspath: Seq[VirtualFile] ): JavaCompiler = { new AnalyzingJavaCompiler(javac, classpath, instance, cpOptions, lookup, searchClasspath) } @@ -55,7 +58,7 @@ object ZincInternals { import sbt.internal.util.Relation def copyRelations( relations: Relations, - rebase: File => File + rebase: VirtualFileRef => VirtualFileRef ): Relations = { val newSrcProd = Relation.empty ++ { relations.srcProd.all.map { diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala index 38f97096eb..75175ae0fa 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopAnalysisCallback.scala @@ -30,25 +30,31 @@ import sbt.internal.inc.UsedName import sbt.internal.inc.Analysis import sbt.internal.inc.Compilation import sbt.internal.inc.SourceInfos - -import bloop.CompileMode -import xsbti.compile.Signature +import sbt.internal.inc.UsedNames +import xsbti.VirtualFile +import bloop.util.AnalysisUtils +import java.nio.file.Path +import xsbti.VirtualFileRef +import xsbti.VirtualFileRef +import java.nio.file.Path +import xsbti.T2 +import sbt.internal.inc.PlainVirtualFileConverter trait IBloopAnalysisCallback extends xsbti.AnalysisCallback { def get: Analysis } final class BloopAnalysisCallback( - compileMode: CompileMode, internalBinaryToSourceClassName: String => Option[String], - internalSourceToClassNamesMap: File => Set[String], - externalAPI: (File, String) => Option[AnalyzedClass], + internalSourceToClassNamesMap: VirtualFile => Set[String], + externalAPI: (Path, String) => Option[AnalyzedClass], stampReader: ReadStamps, output: Output, options: IncOptions, manager: ClassFileManager ) extends IBloopAnalysisCallback { - private[this] val compilation: Compilation = Compilation(output) + + private[this] val compilation: Compilation 
= Compilation(System.currentTimeMillis(), output) override def toString = (List("Class APIs", "Object APIs", "Binary deps", "Products", "Source deps") zip @@ -64,46 +70,54 @@ final class BloopAnalysisCallback( import collection.mutable - private[this] val srcs = mutable.HashSet[File]() + private[this] val srcs = mutable.HashSet[Path]() private[this] val classApis = new mutable.HashMap[String, ApiInfo] private[this] val objectApis = new mutable.HashMap[String, ApiInfo] private[this] val classPublicNameHashes = new mutable.HashMap[String, Array[NameHash]] private[this] val objectPublicNameHashes = new mutable.HashMap[String, Array[NameHash]] private[this] val usedNames = new mutable.HashMap[String, mutable.HashSet[UsedName]] - private[this] val unreportedProblems = new mutable.HashMap[File, mutable.ListBuffer[Problem]] - private[this] val reportedProblems = new mutable.HashMap[File, mutable.ListBuffer[Problem]] - private[this] val mainClasses = new mutable.HashMap[File, mutable.ListBuffer[String]] - private[this] val binaryDeps = new mutable.HashMap[File, mutable.HashSet[File]] + private[this] val unreportedProblems = new mutable.HashMap[Path, mutable.ListBuffer[Problem]] + private[this] val reportedProblems = new mutable.HashMap[Path, mutable.ListBuffer[Problem]] + private[this] val mainClasses = new mutable.HashMap[Path, mutable.ListBuffer[String]] + private[this] val binaryDeps = new mutable.HashMap[Path, mutable.HashSet[Path]] // source file to set of generated (class file, binary class name); only non local classes are stored here - private[this] val nonLocalClasses = new mutable.HashMap[File, mutable.HashSet[(File, String)]] - private[this] val localClasses = new mutable.HashMap[File, mutable.HashSet[File]] + private[this] val nonLocalClasses = new mutable.HashMap[Path, mutable.HashSet[(Path, String)]] + private[this] val localClasses = new mutable.HashMap[Path, mutable.HashSet[Path]] // mapping between src class name and binary (flat) class name for classes 
generated from src file - private[this] val classNames = new mutable.HashMap[File, mutable.HashSet[(String, String)]] + private[this] val classNames = new mutable.HashMap[Path, mutable.HashSet[(String, String)]] // generated class file to its source class name - private[this] val classToSource = new mutable.HashMap[File, String] + private[this] val classToSource = new mutable.HashMap[Path, String] // internal source dependencies private[this] val intSrcDeps = new mutable.HashMap[String, mutable.HashSet[InternalDependency]] // external source dependencies private[this] val extSrcDeps = new mutable.HashMap[String, mutable.HashSet[ExternalDependency]] - private[this] val binaryClassName = new mutable.HashMap[File, String] + private[this] val binaryClassName = new mutable.HashMap[Path, String] // source files containing a macro def. private[this] val macroClasses = mutable.HashSet[String]() + private[this] val converter = PlainVirtualFileConverter.converter + private def add[A, B](map: mutable.HashMap[A, mutable.HashSet[B]], a: A, b: B): Unit = { map.getOrElseUpdate(a, new mutable.HashSet[B]()).+=(b) () } - def startSource(source: File): Unit = { + def startSource(source: VirtualFile): Unit = { + val sourcePath = converter.toPath(source) if (options.strictMode()) { assert( - !srcs.contains(source), + !srcs.contains(sourcePath), s"The startSource can be called only once per source file: $source" ) } - srcs.add(source) + srcs.add(sourcePath) () + + } + + def startSource(source: File): Unit = { + startSource(converter.toVirtualFile(source.toPath())) } def problem( @@ -116,7 +130,7 @@ final class BloopAnalysisCallback( for (source <- InterfaceUtil.jo2o(pos.sourceFile)) { val map = if (reported) reportedProblems else unreportedProblems map - .getOrElseUpdate(source, new mutable.ListBuffer()) + .getOrElseUpdate(source.toPath(), new mutable.ListBuffer()) .+=(InterfaceUtil.problem(category, pos, msg, severity, None)) } } @@ -127,13 +141,13 @@ final class BloopAnalysisCallback( 
} private[this] def externalBinaryDependency( - binary: File, + binary: Path, className: String, - source: File, + source: VirtualFileRef, context: DependencyContext ): Unit = { binaryClassName.put(binary, className) - add(binaryDeps, source, binary) + add(binaryDeps, converter.toPath(source), binary) } private[this] def externalSourceDependency( @@ -147,13 +161,13 @@ final class BloopAnalysisCallback( add(extSrcDeps, sourceClassName, dependency) } - def binaryDependency( - classFile: File, + override def binaryDependency( + classFile: Path, onBinaryClassName: String, fromClassName: String, - fromSourceFile: File, + fromSourceFile: VirtualFileRef, context: DependencyContext - ) = { + ): Unit = { internalBinaryToSourceClassName(onBinaryClassName) match { case Some(dependsOn) => // dependsOn is a source class name // dependency is a product of a source not included in this compilation @@ -169,12 +183,27 @@ final class BloopAnalysisCallback( } } } + def binaryDependency( + classFile: File, + onBinaryClassName: String, + fromClassName: String, + fromSourceFile: File, + context: DependencyContext + ) = { + binaryDependency( + classFile.toPath(), + onBinaryClassName, + fromClassName, + converter.toVirtualFile(fromSourceFile.toPath()), + context + ) + } private[this] def externalDependency( - classFile: File, + classFile: Path, onBinaryName: String, sourceClassName: String, - sourceFile: File, + sourceFile: VirtualFileRef, context: DependencyContext ): Unit = { externalAPI(classFile, onBinaryName) match { @@ -188,29 +217,47 @@ final class BloopAnalysisCallback( } } + override def generatedNonLocalClass( + source: VirtualFileRef, + classFile: Path, + binaryClassName: String, + srcClassName: String + ): Unit = { + val sourcePath = converter.toPath(source) + add(nonLocalClasses, sourcePath, (classFile, binaryClassName)) + add(classNames, sourcePath, (srcClassName, binaryClassName)) + classToSource.put(classFile, srcClassName) + () + + } + def generatedNonLocalClass( source: 
File, classFile: File, binaryClassName: String, srcClassName: String ): Unit = { - //println(s"Generated non local class ${source}, ${classFile}, ${binaryClassName}, ${srcClassName}") - add(nonLocalClasses, source, (classFile, binaryClassName)) - add(classNames, source, (srcClassName, binaryClassName)) - classToSource.put(classFile, srcClassName) + generatedNonLocalClass( + converter.toVirtualFile(source.toPath()), + classFile.toPath(), + binaryClassName, + srcClassName + ) + } + + override def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = { + add(localClasses, converter.toPath(source), classFile) () } def generatedLocalClass(source: File, classFile: File): Unit = { - //println(s"Generated local class ${source}, ${classFile}") - add(localClasses, source, classFile) - () + generatedLocalClass(converter.toVirtualFile(source.toPath()), classFile.toPath()) } - def api(sourceFile: File, classApi: ClassLike): Unit = { + def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = { import xsbt.api.{APIUtil, HashAPI} val className = classApi.name - if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(classApi)) + if (APIUtil.isScalaSourceName(sourceFile.name()) && APIUtil.hasMacro(classApi)) macroClasses.add(className) val shouldMinimize = !Incremental.apiDebug(options) val savedClassApi = if (shouldMinimize) APIUtil.minimize(classApi) else classApi @@ -231,11 +278,19 @@ final class BloopAnalysisCallback( } } - def mainClass(sourceFile: File, className: String): Unit = { - mainClasses.getOrElseUpdate(sourceFile, new mutable.ListBuffer).+=(className) + override def api(sourceFile: File, classApi: ClassLike): Unit = { + api(converter.toVirtualFile(sourceFile.toPath()), classApi) + } + + override def mainClass(sourceFile: VirtualFileRef, className: String): Unit = { + mainClasses.getOrElseUpdate(converter.toPath(sourceFile), new mutable.ListBuffer).+=(className) () } + def mainClass(sourceFile: File, className: String): Unit = { + 
mainClass(converter.toVirtualFile(sourceFile.toPath()), className) + } + def usedName(className: String, name: String, useScopes: ju.EnumSet[UseScope]) = add(usedNames, className, UsedName(name, useScopes)) @@ -245,14 +300,16 @@ final class BloopAnalysisCallback( addUsedNames(addCompilation(addProductsAndDeps(Analysis.empty))) } + // According to docs this is used for build tools and it's not unused in Bloop + override def isPickleJava(): Boolean = false + override def getPickleJarPair(): ju.Optional[T2[Path, Path]] = ju.Optional.empty() + def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation)) def addUsedNames(base: Analysis): Analysis = (base /: usedNames) { case (a, (className, names)) => - (a /: names) { - case (a, name) => a.copy(relations = a.relations.addUsedName(className, name)) - } + a.copy(relations = a.relations.addUsedNames(UsedNames.fromMultiMap(Map(className -> names)))) } private def companionsWithHash(className: String): (Companions, HashAPI.Hash, HashAPI.Hash) = { @@ -300,7 +357,7 @@ final class BloopAnalysisCallback( def addProductsAndDeps(base: Analysis): Analysis = { (base /: srcs) { case (a, src) => - val stamp = stampReader.source(src) + val stamp = stampReader.source(converter.toVirtualFile(src)) val classesInSrc = classNames.getOrElse(src, new mutable.HashSet[(String, String)]()).map(_._1) val analyzedApis = classesInSrc.map(analyzeClass) @@ -309,12 +366,13 @@ final class BloopAnalysisCallback( getOrNil(unreportedProblems, src), getOrNil(mainClasses, src) ) - val binaries = binaryDeps.getOrElse(src, Nil: Iterable[File]) + val binaries = binaryDeps.getOrElse(src, Nil: Iterable[Path]) val localProds = localClasses - .getOrElse(src, new mutable.HashSet[File]()) + .getOrElse(src, new mutable.HashSet[Path]()) .map { classFile => - val classFileStamp = stampReader.product(classFile) - 
Analysis.LocalProduct(classFile, classFileStamp) + val virtualFile = converter.toVirtualFile(classFile) + val classFileStamp = stampReader.product(virtualFile) + Analysis.LocalProduct(virtualFile, classFileStamp) } val binaryToSrcClassName = (classNames @@ -324,12 +382,13 @@ final class BloopAnalysisCallback( }) .toMap val nonLocalProds = nonLocalClasses - .getOrElse(src, Nil: Iterable[(File, String)]) + .getOrElse(src, Nil: Iterable[(Path, String)]) .map { case (classFile, binaryClassName) => + val virtualFile = converter.toVirtualFile(classFile) val srcClassName = binaryToSrcClassName(binaryClassName) - val classFileStamp = stampReader.product(classFile) - Analysis.NonLocalProduct(srcClassName, binaryClassName, classFile, classFileStamp) + val classFileStamp = stampReader.product(virtualFile) + Analysis.NonLocalProduct(srcClassName, binaryClassName, virtualFile, classFileStamp) } val internalDeps = classesInSrc.flatMap(cls => @@ -338,10 +397,13 @@ final class BloopAnalysisCallback( val externalDeps = classesInSrc.flatMap(cls => extSrcDeps.getOrElse(cls, new mutable.HashSet[ExternalDependency]()) ) - val binDeps = binaries.map(d => (d, binaryClassName(d), stampReader binary d)) + val binDeps = binaries.map { d => + val virtual = converter.toVirtualFile(d) + (virtual, binaryClassName(d), stampReader.library(virtual)) + } a.addSource( - src, + converter.toVirtualFile(src), analyzedApis, stamp, info, @@ -360,27 +422,9 @@ final class BloopAnalysisCallback( * the Zinc API phase has run and collected them so that the class file * invalidation registers these files before compiling Java files incrementally. 
*/ - manager.generated(classToSource.keysIterator.toArray) + manager.generated(classToSource.keysIterator.map(converter.toVirtualFile).toArray) } override def dependencyPhaseCompleted(): Unit = () override def classesInOutputJar(): java.util.Set[String] = ju.Collections.emptySet() - override def definedMacro(symbolName: String): Unit = { - compileMode.oracle.registerDefinedMacro(symbolName) - } - - override def invokedMacro(invokedMacroSymbol: String): Unit = { - compileMode.oracle.blockUntilMacroClasspathIsReady(invokedMacroSymbol) - } - - override def isPipeliningEnabled(): Boolean = compileMode.oracle.isPipeliningEnabled - override def downstreamSignatures(): Array[Signature] = - compileMode.oracle.collectDownstreamSignatures() - override def definedSignatures(signatures: Array[Signature]): Unit = { - compileMode.oracle.startDownstreamCompilations(signatures) - } - - override def invalidatedClassFiles(): Array[File] = { - manager.invalidatedClassFiles() - } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala index a9825d9bff..c7dc46476c 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopHighLevelCompiler.scala @@ -7,7 +7,7 @@ import java.util.concurrent.CompletableFuture import bloop.reporter.ZincReporter import bloop.logging.ObservedLogger -import bloop.{CompileMode, JavaSignal} +import bloop.JavaSignal import bloop.tracing.BraveTracer import monix.eval.Task @@ -22,6 +22,11 @@ import scala.util.control.NonFatal import sbt.internal.inc.JarUtils import scala.concurrent.Promise import xsbt.InterfaceCompileCancelled +import bloop.util.AnalysisUtils +import sbt.internal.inc.PlainVirtualFileConverter +import java.nio.file.Path +import xsbti.VirtualFile +import java.nio.file.Files /** * Defines a high-level compiler after 
[[sbt.internal.inc.MixedAnalyzingCompiler]], with the @@ -44,7 +49,8 @@ final class BloopHighLevelCompiler( tracer: BraveTracer ) { private[this] final val setup = config.currentSetup - private[this] final val classpath = config.classpath.map(_.getAbsoluteFile) + private[this] final val classpath: Seq[VirtualFile] = config.classpath + private[this] final val classpathNio: Seq[Path] = classpath.map(PlainVirtualFileConverter.converter.toPath) private[this] val JavaCompleted: Promise[Unit] = Promise.successful(()) @@ -59,12 +65,12 @@ final class BloopHighLevelCompiler( * @return */ def compile( - sourcesToCompile: Set[File], + sourcesToCompile: Set[VirtualFile], changes: DependencyChanges, callback: AnalysisCallback, classfileManager: ClassFileManager, - compileMode: CompileMode, - cancelPromise: Promise[Unit] + cancelPromise: Promise[Unit], + classpathOptions: ClasspathOptions ): Task[Unit] = { def timed[T](label: String)(t: => T): T = { tracer.trace(label) { _ => @@ -74,75 +80,75 @@ final class BloopHighLevelCompiler( val outputDirs = { setup.output match { - case single: SingleOutput => List(single.getOutputDirectory) - case mult: MultipleOutput => mult.getOutputGroups.iterator.map(_.getOutputDirectory).toList + case single: SingleOutput => List(single.getOutputDirectoryAsPath()) + case mult: MultipleOutput => mult.getOutputGroups.iterator.map(_.getOutputDirectoryAsPath()).toList } } outputDirs.foreach { d => - if (!d.getPath.endsWith(".jar") && !d.exists()) - sbt.io.IO.createDirectory(d) + if (!d.endsWith(".jar") && !Files.exists(d)) + sbt.io.IO.createDirectory(d.toFile()) } val includedSources = config.sources.filter(sourcesToCompile) - val (javaSources, scalaSources) = includedSources.partition(_.getName.endsWith(".java")) + val (javaSources, scalaSources) = includedSources.partition(_.name().endsWith(".java")) val existsCompilation = javaSources.size + scalaSources.size > 0 if (existsCompilation) { - reporter.reportStartIncrementalCycle(includedSources, 
outputDirs) - } - - // Note `pickleURI` has already been used to create the analysis callback in `BloopZincCompiler` - val (pipeline: Boolean, batches: Option[Int], completeJava: Promise[Unit], fireJavaCompilation: Task[JavaSignal], separateJavaAndScala: Boolean) = { - compileMode match { - case _: CompileMode.Sequential => (false, None, JavaCompleted, Task.now(JavaSignal.ContinueCompilation), false) - case CompileMode.Pipelined(completeJava, _, fireJavaCompilation, _, separateJavaAndScala) => - (true, None, completeJava, fireJavaCompilation, separateJavaAndScala) - } + reporter.reportStartIncrementalCycle(includedSources, outputDirs.map(_.toFile())) } // Complete empty java promise if there are no java sources - if (javaSources.isEmpty && !completeJava.isCompleted) - completeJava.trySuccess(()) + if (javaSources.isEmpty && !JavaCompleted.isCompleted) + JavaCompleted.trySuccess(()) val compileScala: Task[Unit] = { if (scalaSources.isEmpty) Task.now(()) else { val sources = { - if (separateJavaAndScala) { + if (setup.order == CompileOrder.Mixed) { // No matter if it's scala->java or mixed, we populate java symbols from sources - val transitiveJavaSources = compileMode.oracle.askForJavaSourcesOfIncompleteCompilations - includedSources ++ transitiveJavaSources.filterNot(_.getName == "routes.java") + includedSources } else { - if (setup.order == CompileOrder.Mixed) includedSources - else scalaSources + scalaSources } } def compilerArgs: CompilerArguments = { import sbt.internal.inc.CompileFailed - if (scalac.scalaInstance.compilerJar() == null) { + if (scalac.scalaInstance.compilerJars().isEmpty) { throw new CompileFailed(new Array(0), s"Expected Scala compiler jar in Scala instance containing ${scalac.scalaInstance.allJars().mkString(", ")}", new Array(0)) } - if (scalac.scalaInstance.libraryJar() == null) { + if (scalac.scalaInstance.libraryJars().isEmpty) { throw new CompileFailed(new Array(0), s"Expected Scala library jar in Scala instance containing 
${scalac.scalaInstance.allJars().mkString(", ")}", new Array(0)) } - new CompilerArguments(scalac.scalaInstance, config.classpathOptions) + new CompilerArguments(scalac.scalaInstance, classpathOptions) } def compileSources( - sources: Seq[File], + sources: Seq[VirtualFile], scalacOptions: Array[String], callback: AnalysisCallback ): Unit = { try { - val args = compilerArgs.apply(Nil, classpath, None, scalacOptions).toArray - scalac.compile(sources.toArray, changes, args, setup.output, callback, config.reporter, config.cache, logger, config.progress.toOptional) + val args = compilerArgs.makeArguments(Nil, classpathNio, scalacOptions) + scalac.compile( + sources.toArray, + classpath.toArray, + PlainVirtualFileConverter.converter, + changes, + args.toArray, + setup.output, + callback, + config.reporter, + config.progress.toOptional, + logger + ) } catch { case NonFatal(t) => // If scala compilation happens, complete the java promise so that it doesn't block - completeJava.tryFailure(t) + JavaCompleted.tryFailure(t) t match { case _: NullPointerException if cancelPromise.isCompleted => @@ -154,16 +160,13 @@ final class BloopHighLevelCompiler( def compileSequentially: Task[Unit] = Task { val scalacOptions = setup.options.scalacOptions - val args = compilerArgs.apply(Nil, classpath, None, scalacOptions).toArray + val args = compilerArgs.makeArguments(Nil, classpathNio, scalacOptions) timed("scalac") { compileSources(sources, scalacOptions, callback) } } - batches match { - case Some(batches) => sys.error("Parallel compilation is not yet supported!") - case None => compileSequentially - } + compileSequentially } } @@ -175,52 +178,25 @@ final class BloopHighLevelCompiler( ) val javaOptions = setup.options.javacOptions.toArray[String] try { - javac.compile(javaSources, javaOptions, setup.output, callback, incToolOptions, config.reporter, logger, config.progress) - completeJava.trySuccess(()) + javac.compile(javaSources, Nil, PlainVirtualFileConverter.converter, javaOptions, 
setup.output, None, callback, incToolOptions, config.reporter, logger, config.progress) + JavaCompleted.trySuccess(()) () } catch { case f: CompileFailed => // Intercept and report manually because https://github.com/sbt/zinc/issues/520 config.reporter.printSummary() - completeJava.tryFailure(f) + JavaCompleted.tryFailure(f) throw f } } } - val combinedTasks = { - if (separateJavaAndScala) { - val compileJavaSynchronized = { - fireJavaCompilation.flatMap { - case JavaSignal.ContinueCompilation => compileJava - case JavaSignal.FailFastCompilation(failedProjects) => - throw new StopPipelining(failedProjects) - } - } - - if (javaSources.isEmpty) compileScala - else { - if (setup.order == CompileOrder.JavaThenScala) { - Task.gatherUnordered(List(compileJavaSynchronized, compileScala)).map(_ => ()) - } else { - compileScala.flatMap(_ => compileJavaSynchronized) - } - } + val combinedTasks = + if (setup.order == CompileOrder.JavaThenScala) { + compileJava.flatMap(_ => compileScala) } else { - // Note that separate java and scala is not enabled under pipelining - fireJavaCompilation.flatMap { - case JavaSignal.ContinueCompilation => - if (setup.order == CompileOrder.JavaThenScala) { - compileJava.flatMap(_ => compileScala) - } else { - compileScala.flatMap(_ => compileJava) - } - - case JavaSignal.FailFastCompilation(failedProjects) => - throw new StopPipelining(failedProjects) - } + compileScala.flatMap(_ => compileJava) } - } Task(System.nanoTime).flatMap { nanoStart => combinedTasks.materialize.map { r => @@ -236,10 +212,10 @@ final class BloopHighLevelCompiler( } object BloopHighLevelCompiler { - def apply(config: CompileConfiguration, reporter: ZincReporter, logger: ObservedLogger[_], tracer: BraveTracer): BloopHighLevelCompiler = { + def apply(config: CompileConfiguration, reporter: ZincReporter, logger: ObservedLogger[_], tracer: BraveTracer, classpathOptions: ClasspathOptions): BloopHighLevelCompiler = { val (searchClasspath, entry) = 
MixedAnalyzingCompiler.searchClasspathAndLookup(config) val scalaCompiler = config.compiler.asInstanceOf[AnalyzingCompiler] - val javaCompiler = new AnalyzingJavaCompiler(config.javac, config.classpath, config.compiler.scalaInstance, config.classpathOptions, entry, searchClasspath) + val javaCompiler = new AnalyzingJavaCompiler(config.javac, config.classpath, config.compiler.scalaInstance, classpathOptions, entry, searchClasspath) new BloopHighLevelCompiler(scalaCompiler, javaCompiler, config, reporter, logger, tracer) } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala index caefd3aed5..35223ac80e 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopIncremental.scala @@ -4,11 +4,9 @@ package sbt.internal.inc.bloop.internal import java.io.File import java.util.concurrent.CompletableFuture -import bloop.CompilerOracle import bloop.UniqueCompileInputs import bloop.reporter.ZincReporter import bloop.tracing.BraveTracer -import bloop.CompileMode import monix.eval.Task import sbt.internal.inc.{Analysis, InvalidationProfiler, Lookup, Stamper, Stamps} @@ -17,12 +15,23 @@ import xsbti.AnalysisCallback import xsbti.api.AnalyzedClass import xsbti.compile.analysis.{ReadStamps, Stamp} import xsbti.compile._ +import xsbti.VirtualFile +import xsbti.VirtualFileRef +import bloop.util.AnalysisUtils +import sbt.internal.inc.PlainVirtualFileConverter +import java.nio.file.Path +import xsbti.PathBasedFile +import sbt.internal.inc.MappedFileConverter +import scala.tools.nsc.Properties object BloopIncremental { type CompileFunction = - (Set[File], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] + (Set[VirtualFile], DependencyChanges, AnalysisCallback, ClassFileManager) => Task[Unit] + + private val converter = 
PlainVirtualFileConverter.converter + def compile( - sources: Iterable[File], + sources: Iterable[VirtualFile], uniqueInputs: UniqueCompileInputs, lookup: Lookup, compile: CompileFunction, @@ -31,12 +40,11 @@ object BloopIncremental { log: Logger, reporter: ZincReporter, options: IncOptions, - mode: CompileMode, manager: ClassFileManager, tracer: BraveTracer, isHydraEnabled: Boolean ): Task[(Boolean, Analysis)] = { - def getExternalAPI(lookup: Lookup): (File, String) => Option[AnalyzedClass] = { (_: File, binaryClassName: String) => + def getExternalAPI(lookup: Lookup): (Path, String) => Option[AnalyzedClass] = { (_: Path, binaryClassName: String) => lookup.lookupAnalysis(binaryClassName) flatMap { case (analysis: Analysis) => val sourceClassName = @@ -50,19 +58,19 @@ object BloopIncremental { val previous = previous0 match { case a: Analysis => a } val previousRelations = previous.relations val internalBinaryToSourceClassName = (binaryClassName: String) => previousRelations.productClassName.reverse(binaryClassName).headOption - val internalSourceToClassNamesMap: File => Set[String] = (f: File) => previousRelations.classNames(f) + val internalSourceToClassNamesMap: VirtualFile => Set[String] = (f: VirtualFile) => previousRelations.classNames(f) val builder: () => IBloopAnalysisCallback = { - if (!isHydraEnabled) () => new BloopAnalysisCallback(mode, internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) + if (!isHydraEnabled) () => new BloopAnalysisCallback(internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) else - () => new ConcurrentAnalysisCallback(mode, internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) + () => new ConcurrentAnalysisCallback(internalBinaryToSourceClassName, internalSourceToClassNamesMap, externalAPI, current, output, options, manager) } // We used to catch for 
`CompileCancelled`, but we prefer to propagate it so that Bloop catches it compileIncremental(sources, uniqueInputs, lookup, previous, current, compile, builder, reporter, log, output, options, manager, tracer) } def compileIncremental( - sources: Iterable[File], + sources: Iterable[VirtualFile], uniqueInputs: UniqueCompileInputs, lookup: Lookup, previous: Analysis, @@ -80,11 +88,21 @@ object BloopIncremental { )(implicit equivS: Equiv[Stamp]): Task[(Boolean, Analysis)] = { val setOfSources = sources.toSet val incremental = new BloopNameHashing(log, reporter, uniqueInputs, options, profiler.profileRun, tracer) - val initialChanges = incremental.detectInitialChanges(setOfSources, previous, current, lookup, output) + val initialChanges = incremental.detectInitialChanges(setOfSources, previous, current, lookup, converter, output) + def isJrt(path: Path) = path.getFileSystem.provider().getScheme == "jrt" val binaryChanges = new DependencyChanges { - val modifiedBinaries = initialChanges.binaryDeps.toArray + val modifiedLibraries = initialChanges.libraryDeps.toArray + + val modifiedBinaries: Array[File] = modifiedLibraries + .map(converter.toPath(_)) + .collect { + // jrt path is neither a jar nor a normal file + case path if !isJrt(path) => + path.toFile() + } + .distinct val modifiedClasses = initialChanges.external.allModified.toArray - def isEmpty = modifiedBinaries.isEmpty && modifiedClasses.isEmpty + def isEmpty = modifiedLibraries.isEmpty && modifiedClasses.isEmpty } val (initialInvClasses, initialInvSources) = @@ -102,7 +120,7 @@ object BloopIncremental { import sbt.internal.inc.{ClassFileManager => ClassFileManagerImpl} val analysisTask = { - val doCompile = (srcs: Set[File], changes: DependencyChanges) => { + val doCompile = (srcs: Set[VirtualFile], changes: DependencyChanges) => { for { callback <- Task.now(callbackBuilder()) _ <- compile(srcs, changes, callback, manager) diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopLookup.scala 
b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopLookup.scala index c4f9e24b56..ed17e57b85 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopLookup.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopLookup.scala @@ -45,9 +45,9 @@ object BloopLookup { // bloop server sessions, it's just an optimization to avoid checking for isDir BloopStamps.isDirectoryHash(fh) || // If directory exists, filter it out - fh.file.isDirectory() || + fh.file.toFile().isDirectory() || // If directory is empty classes dir, filter it out - CompileOutPaths.hasEmptyClassesDir(AbsolutePath(fh.file.toPath)) + CompileOutPaths.hasEmptyClassesDir(AbsolutePath(fh.file)) } } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala index 3065998af3..8a8c9ee2f4 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopNameHashing.scala @@ -2,7 +2,6 @@ package sbt.internal.inc.bloop.internal import java.io.File -import _root_.bloop.CompilerOracle import _root_.bloop.UniqueCompileInputs import _root_.bloop.reporter.ZincReporter import _root_.bloop.tracing.BraveTracer @@ -12,6 +11,10 @@ import sbt.util.Logger import sbt.internal.inc._ import xsbti.compile.{ClassFileManager, DependencyChanges, IncOptions} import xsbti.compile.Output +import xsbti.VirtualFile +import xsbti.VirtualFile +import xsbti.VirtualFileRef +import xsbti.FileConverter /** * Defines Bloop's version of `IncrementalNameHashing` that extends Zinc's original @@ -60,12 +63,12 @@ private final class BloopNameHashing( */ def entrypoint( invalidatedClasses: Set[String], - initialChangedSources: Set[File], - allSources: Set[File], + initialChangedSources: Set[VirtualFileRef], + allSources: Set[VirtualFile], binaryChanges: DependencyChanges, lookup: ExternalLookup, previous: 
Analysis, - compileTask: (Set[File], DependencyChanges) => Task[Analysis], + compileTask: (Set[VirtualFile], DependencyChanges) => Task[Analysis], manager: ClassFileManager, cycleNum: Int ): Task[Analysis] = { @@ -78,7 +81,12 @@ private final class BloopNameHashing( // Computes which source files are mapped to the invalidated classes and recompile them val invalidatedSources = - mapInvalidationsToSources(classesToRecompile, initialChangedSources, allSources, previous) + mapInvalidationsToSources( + classesToRecompile, + initialChangedSources, + allSources.map(v => v: VirtualFileRef), + previous + ).collect { case f: VirtualFile => f } recompileClasses(invalidatedSources, binaryChanges, previous, compileTask, manager).flatMap { current => @@ -139,26 +147,27 @@ private final class BloopNameHashing( import xsbti.compile.analysis.{ReadStamps, Stamp => XStamp} override def detectInitialChanges( - sources: Set[File], + sources: Set[VirtualFile], previousAnalysis: Analysis, stamps: ReadStamps, lookup: Lookup, + converter: FileConverter, output: Output )(implicit equivS: Equiv[XStamp]): InitialChanges = { tracer.traceVerbose("detecting initial changes") { tracer => // Copy pasting from IncrementalCommon to optimize/remove IO work - import IncrementalCommon.{isBinaryModified, findExternalAnalyzedClass} val previous = previousAnalysis.stamps val previousRelations = previousAnalysis.relations - val hashesMap = uniqueInputs.sources.map(kv => kv.source.toFile -> kv.hash).toMap + val hashesMap = uniqueInputs.sources.map(kv => kv.source -> kv.hash).toMap val sourceChanges = tracer.traceVerbose("source changes") { _ => lookup.changedSources(previousAnalysis).getOrElse { val previousSources = previous.allSources.toSet - new UnderlyingChanges[File] { - private val inBoth = previousSources & sources + new UnderlyingChanges[VirtualFileRef] { + private val sourceRefs = sources.map(f => f: VirtualFileRef) + private val inBoth = previousSources & sourceRefs val removed = previousSources 
-- inBoth - val added = sources -- inBoth + val added = sourceRefs -- inBoth val (changed, unmodified) = inBoth.partition { f => import sbt.internal.inc.Hash // We compute hashes via xxHash in Bloop, so we adapt them to the zinc hex format @@ -166,26 +175,34 @@ private final class BloopNameHashing( .get(f) .map(bloopHash => BloopStamps.fromBloopHashToZincHash(bloopHash)) .getOrElse(BloopStamps.forHash(f)) - !equivS.equiv(previous.source(f), newStamp) + !equivS.equiv(previous.sources(f), newStamp) } } } } // Unnecessary to compute removed products because we can ensure read-only classes dir is untouched - val removedProducts = Set.empty[File] - val changedBinaries: Set[File] = tracer.traceVerbose("changed binaries") { _ => + val removedProducts = Set.empty[VirtualFileRef] + val changedBinaries: Set[VirtualFileRef] = tracer.traceVerbose("changed binaries") { _ => lookup.changedBinaries(previousAnalysis).getOrElse { - val detectChange = - isBinaryModified(false, lookup, previous, stamps, previousRelations, log) - previous.allBinaries.filter(detectChange).toSet + val detectChange = IncrementalCommon.isLibraryModified( + false, + lookup, + previous, + stamps, + previousRelations, + PlainVirtualFileConverter.converter, + log + ) + previous.allLibraries.filter(detectChange).toSet } } val externalApiChanges: APIChanges = tracer.traceVerbose("external api changes") { _ => val incrementalExternalChanges = { val previousAPIs = previousAnalysis.apis - val externalFinder = findExternalAnalyzedClass(lookup) _ + val externalFinder = + lookup.lookupAnalyzedClass(_: String, None).getOrElse(APIs.emptyAnalyzedClass) detectAPIChanges( previousAPIs.allExternals, previousAPIs.externalAPI, @@ -206,21 +223,28 @@ private final class BloopNameHashing( } def recompileClasses( - sources: Set[File], + sources: Set[VirtualFile], binaryChanges: DependencyChanges, previous: Analysis, - compileTask: (Set[File], DependencyChanges) => Task[Analysis], + compileTask: (Set[VirtualFile], 
DependencyChanges) => Task[Analysis], classfileManager: ClassFileManager ): Task[Analysis] = { val pruned = - IncrementalCommon.pruneClassFilesOfInvalidations(sources, previous, classfileManager) + IncrementalCommon.pruneClassFilesOfInvalidations( + sources, + previous, + classfileManager, + PlainVirtualFileConverter.converter + ) debug("********* Pruned: \n" + pruned.relations + "\n*********") compileTask(sources, binaryChanges).map { fresh => debug("********* Fresh: \n" + fresh.relations + "\n*********") /* This is required for both scala compilation and forked java compilation, despite * being redundant for the most common Java compilation (using the local compiler). */ - classfileManager.generated(fresh.relations.allProducts.toArray) + classfileManager.generated(fresh.relations.allProducts.collect { + case v: VirtualFile => v + }.toArray) val merged = pruned ++ fresh debug("********* Merged: \n" + merged.relations + "\n*********") diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopStamps.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopStamps.scala index e21c299a68..80da9434a2 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopStamps.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/BloopStamps.scala @@ -10,14 +10,19 @@ import sbt.internal.inc.Stamps import sbt.internal.inc.Stamper import sbt.internal.inc.EmptyStamp import xsbti.compile.analysis.{ReadStamps, Stamp} +import xsbti.VirtualFileRef +import bloop.util.AnalysisUtils +import java.nio.file.Path +import sbt.internal.inc.PlainVirtualFileConverter object BloopStamps { + private val converter = PlainVirtualFileConverter.converter def initial: ReadStamps = { Stamps.initial( - Stamper.forLastModified, + Stamper.forLastModifiedInRootPaths(converter), // The hash is for the sources BloopStamps.forHash, - Stamper.forLastModified + Stamper.forHashInRootPaths(converter) ) } @@ -25,17 +30,17 @@ object BloopStamps { private final val 
directoryHash = scala.util.Random.nextInt() final val cancelledHash = scala.util.Random.nextInt() - def emptyHash(file: File): FileHash = FileHash.of(file, emptyHash) - def cancelledHash(file: File): FileHash = FileHash.of(file, cancelledHash) + def emptyHash(path: Path): FileHash = FileHash.of(path, emptyHash) + def cancelledHash(path: Path): FileHash = FileHash.of(path, cancelledHash) - def directoryHash(file: File): FileHash = FileHash.of(file, directoryHash) + def directoryHash(path: Path): FileHash = FileHash.of(path, directoryHash) def isDirectoryHash(fh: FileHash): Boolean = fh.hash == directoryHash - def forHash(file: File): Hash = { - fromBloopHashToZincHash(ByteHasher.hashFileContents(file)) + def forHash(file: VirtualFileRef): Hash = { + fromBloopHashToZincHash(ByteHasher.hashFileContents(converter.toPath(file).toFile())) } - def emptyStampFor(file: File): Stamp = EmptyStamp + def emptyStamps: Stamp = EmptyStamp def fromBloopHashToZincHash(hash: Int): Hash = { val hex = hash.toString diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala index 21535e1402..59cdc93be5 100644 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala +++ b/backend/src/main/scala/sbt/internal/inc/bloop/internal/ConcurrentAnalysisCallback.scala @@ -30,9 +30,13 @@ import sbt.internal.inc.UsedName import sbt.internal.inc.Analysis import sbt.internal.inc.Compilation import sbt.internal.inc.SourceInfos - -import bloop.CompileMode -import xsbti.compile.Signature +import xsbti.VirtualFile +import sbt.internal.inc.UsedNames +import java.nio.file.Path +import xsbti.VirtualFileRef +import xsbti.T2 +import bloop.util.AnalysisUtils +import sbt.internal.inc.PlainVirtualFileConverter /** * This class provides a thread-safe implementation of `xsbti.AnalysisCallback` which is required to compile with the @@ -45,16 +49,16 
@@ import xsbti.compile.Signature * IMPORTANT: All modifications made to BloopAnalysisCallback` must be replicated here. */ final class ConcurrentAnalysisCallback( - compileMode: CompileMode, internalBinaryToSourceClassName: String => Option[String], - internalSourceToClassNamesMap: File => Set[String], - externalAPI: (File, String) => Option[AnalyzedClass], + internalSourceToClassNamesMap: VirtualFile => Set[String], + externalAPI: (Path, String) => Option[AnalyzedClass], stampReader: ReadStamps, output: Output, options: IncOptions, manager: ClassFileManager ) extends IBloopAnalysisCallback { - private[this] val compilation: Compilation = Compilation(output) + + private[this] val compilation: Compilation = Compilation(System.currentTimeMillis(), output) override def toString = (List("Class APIs", "Object APIs", "Binary deps", "Products", "Source deps") zip @@ -73,48 +77,55 @@ final class ConcurrentAnalysisCallback( private type ConcurrentSet[A] = ConcurrentHashMap.KeySetView[A, java.lang.Boolean] - private[this] val srcs = ConcurrentHashMap.newKeySet[File]() + private[this] val srcs = ConcurrentHashMap.newKeySet[Path]() private[this] val classApis = new TrieMap[String, ApiInfo] private[this] val objectApis = new TrieMap[String, ApiInfo] private[this] val classPublicNameHashes = new TrieMap[String, Array[NameHash]] private[this] val objectPublicNameHashes = new TrieMap[String, Array[NameHash]] private[this] val usedNames = new TrieMap[String, ConcurrentSet[UsedName]] - private[this] val unreportedProblems = new TrieMap[File, ConcurrentLinkedQueue[Problem]] - private[this] val reportedProblems = new TrieMap[File, ConcurrentLinkedQueue[Problem]] - private[this] val mainClasses = new TrieMap[File, ConcurrentLinkedQueue[String]] - private[this] val binaryDeps = new TrieMap[File, ConcurrentSet[File]] + private[this] val unreportedProblems = new TrieMap[Path, ConcurrentLinkedQueue[Problem]] + private[this] val reportedProblems = new TrieMap[Path, 
ConcurrentLinkedQueue[Problem]] + private[this] val mainClasses = new TrieMap[Path, ConcurrentLinkedQueue[String]] + private[this] val binaryDeps = new TrieMap[Path, ConcurrentSet[Path]] // source file to set of generated (class file, binary class name); only non local classes are stored here - private[this] val nonLocalClasses = new TrieMap[File, ConcurrentSet[(File, String)]] - private[this] val localClasses = new TrieMap[File, ConcurrentSet[File]] + private[this] val nonLocalClasses = new TrieMap[Path, ConcurrentSet[(Path, String)]] + private[this] val localClasses = new TrieMap[Path, ConcurrentSet[Path]] // mapping between src class name and binary (flat) class name for classes generated from src file - private[this] val classNames = new TrieMap[File, ConcurrentSet[(String, String)]] + private[this] val classNames = new TrieMap[Path, ConcurrentSet[(String, String)]] // generated class file to its source class name - private[this] val classToSource = new TrieMap[File, String] + private[this] val classToSource = new TrieMap[Path, String] // internal source dependencies private[this] val intSrcDeps = new TrieMap[String, ConcurrentSet[InternalDependency]] // external source dependencies private[this] val extSrcDeps = new TrieMap[String, ConcurrentSet[ExternalDependency]] - private[this] val binaryClassName = new TrieMap[File, String] + private[this] val binaryClassName = new TrieMap[Path, String] // source files containing a macro def. 
private[this] val macroClasses = ConcurrentHashMap.newKeySet[String]() + private[this] val converter = PlainVirtualFileConverter.converter + private def add[A, B](map: TrieMap[A, ConcurrentSet[B]], a: A, b: B): Unit = { map.getOrElseUpdate(a, ConcurrentHashMap.newKeySet[B]()).add(b) () } - def startSource(source: File): Unit = { + def startSource(source: VirtualFile): Unit = { + val sourcePath = converter.toPath(source) if (options.strictMode()) { assert( !srcs.contains(source), s"The startSource can be called only once per source file: $source" ) } - srcs.add(source) + srcs.add(sourcePath) () } + def startSource(source: File): Unit = { + startSource(converter.toVirtualFile(source.toPath())) + } + def problem( category: String, pos: Position, @@ -125,7 +136,7 @@ final class ConcurrentAnalysisCallback( for (source <- InterfaceUtil.jo2o(pos.sourceFile)) { val map = if (reported) reportedProblems else unreportedProblems map - .getOrElseUpdate(source, new ConcurrentLinkedQueue) + .getOrElseUpdate(source.toPath(), new ConcurrentLinkedQueue) .add(InterfaceUtil.problem(category, pos, msg, severity, None)) } } @@ -136,13 +147,13 @@ final class ConcurrentAnalysisCallback( } private[this] def externalBinaryDependency( - binary: File, + binary: Path, className: String, - source: File, + source: VirtualFileRef, context: DependencyContext ): Unit = { binaryClassName.put(binary, className) - add(binaryDeps, source, binary) + add(binaryDeps, converter.toPath(source), binary) } private[this] def externalSourceDependency( @@ -157,10 +168,10 @@ final class ConcurrentAnalysisCallback( } def binaryDependency( - classFile: File, + classFile: Path, onBinaryClassName: String, fromClassName: String, - fromSourceFile: File, + fromSourceFile: VirtualFileRef, context: DependencyContext ) = { internalBinaryToSourceClassName(onBinaryClassName) match { @@ -179,11 +190,27 @@ final class ConcurrentAnalysisCallback( } } - private[this] def externalDependency( + def binaryDependency( classFile: 
File, + onBinaryClassName: String, + fromClassName: String, + fromSourceFile: File, + context: DependencyContext + ) = { + binaryDependency( + classFile.toPath(), + onBinaryClassName, + fromClassName, + converter.toVirtualFile(fromSourceFile.toPath()), + context + ) + } + + private[this] def externalDependency( + classFile: Path, onBinaryName: String, sourceClassName: String, - sourceFile: File, + sourceFile: VirtualFileRef, context: DependencyContext ): Unit = { externalAPI(classFile, onBinaryName) match { @@ -198,28 +225,46 @@ final class ConcurrentAnalysisCallback( } def generatedNonLocalClass( - source: File, - classFile: File, + source: VirtualFileRef, + classFile: Path, binaryClassName: String, srcClassName: String ): Unit = { + val sourcePath = converter.toPath(source) //println(s"Generated non local class ${source}, ${classFile}, ${binaryClassName}, ${srcClassName}") - add(nonLocalClasses, source, (classFile, binaryClassName)) - add(classNames, source, (srcClassName, binaryClassName)) + add(nonLocalClasses, sourcePath, (classFile, binaryClassName)) + add(classNames, sourcePath, (srcClassName, binaryClassName)) classToSource.put(classFile, srcClassName) () } - def generatedLocalClass(source: File, classFile: File): Unit = { - //println(s"Generated local class ${source}, ${classFile}") - add(localClasses, source, classFile) + def generatedNonLocalClass( + source: File, + classFile: File, + binaryClassName: String, + srcClassName: String + ): Unit = { + generatedNonLocalClass( + converter.toVirtualFile(source.toPath()), + classFile.toPath(), + binaryClassName, + srcClassName + ) + } + + override def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = { + add(localClasses, converter.toPath(source), classFile) () } - def api(sourceFile: File, classApi: ClassLike): Unit = { + def generatedLocalClass(source: File, classFile: File): Unit = { + generatedLocalClass(converter.toVirtualFile(source.toPath()), classFile.toPath()) + } + + override def 
api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = { import xsbt.api.{APIUtil, HashAPI} val className = classApi.name - if (APIUtil.isScalaSourceName(sourceFile.getName) && APIUtil.hasMacro(classApi)) + if (APIUtil.isScalaSourceName(sourceFile.name()) && APIUtil.hasMacro(classApi)) macroClasses.add(className) val shouldMinimize = !Incremental.apiDebug(options) val savedClassApi = if (shouldMinimize) APIUtil.minimize(classApi) else classApi @@ -240,11 +285,21 @@ final class ConcurrentAnalysisCallback( } } - def mainClass(sourceFile: File, className: String): Unit = { - mainClasses.getOrElseUpdate(sourceFile, new ConcurrentLinkedQueue).add(className) + override def api(sourceFile: File, classApi: ClassLike): Unit = { + api(converter.toVirtualFile(sourceFile.toPath()), classApi) + } + + override def mainClass(sourceFile: VirtualFileRef, className: String): Unit = { + mainClasses + .getOrElseUpdate(converter.toPath(sourceFile), new ConcurrentLinkedQueue) + .add(className) () } + def mainClass(sourceFile: File, className: String): Unit = { + mainClass(converter.toVirtualFile(sourceFile.toPath()), className) + } + def usedName(className: String, name: String, useScopes: ju.EnumSet[UseScope]) = add(usedNames, className, UsedName(name, useScopes)) @@ -254,15 +309,19 @@ final class ConcurrentAnalysisCallback( addUsedNames(addCompilation(addProductsAndDeps(Analysis.empty))) } + // According to docs this is used for build tools and it's not used in Bloop + override def isPickleJava(): Boolean = false + override def getPickleJarPair(): ju.Optional[T2[Path, Path]] = ju.Optional.empty() + def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten def addCompilation(base: Analysis): Analysis = base.copy(compilations = base.compilations.add(compilation)) def addUsedNames(base: Analysis): Analysis = (base /: usedNames) { case (a, (className, names)) => import scala.collection.JavaConverters._ - names.asScala.foldLeft(a) { - case (a, name)
=> a.copy(relations = a.relations.addUsedName(className, name)) - } + a.copy(relations = + a.relations.addUsedNames(UsedNames.fromMultiMap(Map(className -> names.asScala))) + ) } private def companionsWithHash(className: String): (Companions, HashAPI.Hash, HashAPI.Hash) = { @@ -311,7 +370,8 @@ final class ConcurrentAnalysisCallback( import scala.collection.JavaConverters._ srcs.asScala.foldLeft(base) { case (a, src) => - val stamp = stampReader.source(src) + val sourceV = converter.toVirtualFile(src) + val stamp = stampReader.source(sourceV) val classesInSrc = classNames .getOrElse(src, ConcurrentHashMap.newKeySet[(String, String)]()) @@ -323,13 +383,14 @@ final class ConcurrentAnalysisCallback( getOrNil(unreportedProblems.mapValues { _.asScala.toSeq }, src), getOrNil(mainClasses.mapValues { _.asScala.toSeq }, src) ) - val binaries = binaryDeps.getOrElse(src, ConcurrentHashMap.newKeySet[File]).asScala + val binaries = binaryDeps.getOrElse(src, ConcurrentHashMap.newKeySet[Path]).asScala val localProds = localClasses - .getOrElse(src, ConcurrentHashMap.newKeySet[File]()) + .getOrElse(src, ConcurrentHashMap.newKeySet[Path]()) .asScala .map { classFile => - val classFileStamp = stampReader.product(classFile) - Analysis.LocalProduct(classFile, classFileStamp) + val classFileV = converter.toVirtualFile(classFile) + val classFileStamp = stampReader.product(classFileV) + Analysis.LocalProduct(classFileV, classFileStamp) } val binaryToSrcClassName = (classNames @@ -340,13 +401,14 @@ final class ConcurrentAnalysisCallback( }) .toMap val nonLocalProds = nonLocalClasses - .getOrElse(src, ConcurrentHashMap.newKeySet[(File, String)]()) + .getOrElse(src, ConcurrentHashMap.newKeySet[(Path, String)]()) .asScala .map { case (classFile, binaryClassName) => + val classFileV = converter.toVirtualFile(classFile) val srcClassName = binaryToSrcClassName(binaryClassName) - val classFileStamp = stampReader.product(classFile) - Analysis.NonLocalProduct(srcClassName, binaryClassName, 
classFile, classFileStamp) + val classFileStamp = stampReader.product(classFileV) + Analysis.NonLocalProduct(srcClassName, binaryClassName, classFileV, classFileStamp) } val internalDeps = classesInSrc.flatMap(cls => @@ -355,10 +417,13 @@ final class ConcurrentAnalysisCallback( val externalDeps = classesInSrc.flatMap(cls => extSrcDeps.getOrElse(cls, ConcurrentHashMap.newKeySet[ExternalDependency]()).asScala ) - val binDeps = binaries.map(d => (d, binaryClassName(d), stampReader binary d)) + val binDeps = binaries.map { d => + val virtual = converter.toVirtualFile(d) + (virtual, binaryClassName(d), stampReader.library(virtual)) + } a.addSource( - src, + sourceV, analyzedApis, stamp, info, @@ -373,27 +438,9 @@ final class ConcurrentAnalysisCallback( override def apiPhaseCompleted(): Unit = { // See [[BloopAnalysisCallback.apiPhaseCompleted]] - manager.generated(classToSource.keysIterator.toArray) + manager.generated(classToSource.keysIterator.map(converter.toVirtualFile).toArray) } override def dependencyPhaseCompleted(): Unit = () override def classesInOutputJar(): java.util.Set[String] = ju.Collections.emptySet() - override def definedMacro(symbolName: String): Unit = { - compileMode.oracle.registerDefinedMacro(symbolName) - } - - override def invokedMacro(invokedMacroSymbol: String): Unit = { - compileMode.oracle.blockUntilMacroClasspathIsReady(invokedMacroSymbol) - } - - override def isPipeliningEnabled(): Boolean = compileMode.oracle.isPipeliningEnabled - override def downstreamSignatures(): Array[Signature] = - compileMode.oracle.collectDownstreamSignatures() - override def definedSignatures(signatures: Array[Signature]): Unit = { - compileMode.oracle.startDownstreamCompilations(signatures) - } - - override def invalidatedClassFiles(): Array[File] = { - manager.invalidatedClassFiles() - } } diff --git a/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala b/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala 
deleted file mode 100644 index 93d2b44a26..0000000000 --- a/backend/src/main/scala/sbt/internal/inc/bloop/internal/StopPipelining.scala +++ /dev/null @@ -1,15 +0,0 @@ -package sbt.internal.inc.bloop.internal - -/** - * Defines a stop point for pipelined compilation. - * - * Pipelining forces the compilation of dependent modules while dependent modules are being - * compiled. If there is an error in any of the previous Scala projects, the compilation - * of the projects that depend on the failed project need to fail fast. - * - * `StopPipelining` is the way to stop pipelined compilation from the guts of Zinc. We throw - * this exception from deep inside `BloopHighLevelCompiler`, and then we catch it in - * `bloop.Compiler` and translate it to a `Compiler.Blocked` result. - */ -final class StopPipelining(val failedProjectNames: List[String]) - extends Exception(s"Pipelining stopped, projects ${failedProjectNames} failed to compile.") diff --git a/backend/src/main/scala/sbt/internal/inc/javac/BloopForkedJavaUtils.scala b/backend/src/main/scala/sbt/internal/inc/javac/BloopForkedJavaUtils.scala index 178ed3225f..f86d78ef6c 100644 --- a/backend/src/main/scala/sbt/internal/inc/javac/BloopForkedJavaUtils.scala +++ b/backend/src/main/scala/sbt/internal/inc/javac/BloopForkedJavaUtils.scala @@ -3,12 +3,13 @@ package sbt.internal.inc.javac import java.io.File import xsbti.Logger import xsbti.Reporter +import java.nio.file.Path object BloopForkedJavaUtils { def launch( javac: Option[File], binaryName: String, - sources: Seq[File], + sources: Seq[Path], options: Seq[String], log: Logger, reporter: Reporter @@ -16,7 +17,7 @@ object BloopForkedJavaUtils { def normalizeSlash(s: String) = s.replace(File.separatorChar, '/') val (jArgs, nonJArgs) = options.partition(_.startsWith("-J")) - val allArguments = nonJArgs ++ sources.map(_.getAbsolutePath) + val allArguments = nonJArgs ++ sources.map(_.toAbsolutePath().toString()) val exe = javac match { case None => binaryName diff --git 
a/backend/src/test/scala/bloop/CompilerCacheSpec.scala b/backend/src/test/scala/bloop/CompilerCacheSpec.scala index 4af1be6ee3..d83f182de5 100644 --- a/backend/src/test/scala/bloop/CompilerCacheSpec.scala +++ b/backend/src/test/scala/bloop/CompilerCacheSpec.scala @@ -62,7 +62,6 @@ class CompilerCacheSpec { val classFileManager = new ClassFileManager { override def delete(classes: Array[File]): Unit = () - override def invalidatedClassFiles(): Array[File] = Array.empty override def generated(classes: Array[File]): Unit = () override def complete(success: Boolean): Unit = () } diff --git a/benchmark-bridge b/benchmark-bridge index aae5156dcb..8489b0de9f 160000 --- a/benchmark-bridge +++ b/benchmark-bridge @@ -1 +1 @@ -Subproject commit aae5156dcb41f55b24836f33da28d835fb5b9265 +Subproject commit 8489b0de9ff82d7bbe2450da07b938fabb6605b1 diff --git a/bin/run-benchmarks.sh b/bin/run-benchmarks.sh index 50f12f511d..3333de8ccb 100755 --- a/bin/run-benchmarks.sh +++ b/bin/run-benchmarks.sh @@ -91,23 +91,23 @@ main() { #"-wi 4 -i 4 -f1 -t1 -p project=lichess -p projectName=lila-test" #"-wi 15 -i 10 -f1 -t1 -p project=sbt -p projectName=sbtRoot" #"-wi 8 -i 5 -f1 -t1 -p project=frontend -p projectName=root-test" - #"-wi 8 -i 5 -f1 -t1 -p project=finagle -p projectName=finagle-test" + "-wi 8 -i 5 -f1 -t1 -p project=finagle -p projectName=finagle-test" "-wi 10 -i 10 -f1 -t1 -p project=algebird -p projectName=algebird-test" - #"-wi 20 -i 10 -f1 -t1 -p project=scalatra -p projectName=scalatra-project-test" + "-wi 20 -i 10 -f1 -t1 -p project=scalatra -p projectName=scalatra-project-test" "-wi 15 -i 5 -f1 -t1 -p project=atlas -p projectName=root-test" "-wi 20 -i 10 -f1 -t1 -p project=grid -p projectName=grid-test" - #"-wi 7 -i 5 -f1 -t1 -p project=akka -p projectName=akka-test" - #"-wi 10 -i 5 -f1 -t1 -p project=circe -p projectName=circe-test" + "-wi 7 -i 5 -f1 -t1 -p project=akka -p projectName=akka-test" + # "-wi 10 -i 5 -f1 -t1 -p project=circe -p projectName=circe-test" #"-wi 
10 -i 5 -f1 -t1 -p project=linkerd -p projectName=all-test" - #"-wi 20 -i 10 -f1 -t1 -p project=summingbird -p projectName=summingbird-test" + "-wi 20 -i 10 -f1 -t1 -p project=summingbird -p projectName=summingbird-test" "-wi 5 -i 5 -f1 -t1 -p project=http4s -p projectName=root-test" #"-wi 15 -i 10 -f1 -t1 -p project=gatling -p projectName=gatling-parent-test" #"-wi 5 -i 5 -f1 -t1 -p project=marathon -p projectName=marathon-test" #"-wi 15 -i 5 -f1 -t1 -p project=coursier -p projectName=coursier-repo-test" #"-wi 10 -i 5 -f1 -t1 -p project=prisma -p projectName=root-test" - #"-wi 5 -i 3 -f1 -t1 -p project=cats -p projectName=cats-test" # compiles hot in 3 minutes + # "-wi 5 -i 3 -f1 -t1 -p project=cats -p projectName=cats-test" # compiles hot in 3 minutes #"-wi 2 -i 3 -f1 -t1 -p project=scalding -p projectName=scalding-test" - "-wi 2 -i 3 -f1 -t1 -p project=scio -p projectName=scio+test" + #"-wi 2 -i 3 -f1 -t1 -p project=scio -p projectName=scio+test" ) JAVA_HOMES=( @@ -122,7 +122,7 @@ main() { for java_home in "${JAVA_HOMES[@]}"; do for benchmark in "${SBT_BLOOP_BENCHMARKS[@]}"; do - SBT_COMMANDS+=("$JMH_CMD .*Hot(Bloop|PipelinedBloop|Sbt)Benchmark.* $benchmark -jvm $java_home") + SBT_COMMANDS+=("$JMH_CMD .*Hot(Bloop|Sbt)Benchmark.* $benchmark -jvm $java_home") done done diff --git a/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala b/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala index 7ceab52754..becf0afef7 100644 --- a/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala +++ b/frontend/src/main/scala/bloop/bsp/BloopBspServices.scala @@ -12,7 +12,7 @@ import bloop.io.Environment.lineSeparator import bloop.io.ServerHandle import bloop.util.JavaRuntime import bloop.bsp.BloopBspDefinitions.BloopExtraBuildParams -import bloop.{CompileMode, Compiler, ScalaInstance} +import bloop.{Compiler, ScalaInstance} import bloop.cli.{Commands, ExitStatus, Validate} import bloop.dap.{BloopDebuggeeRunner, DebugServerLogger} import bloop.data.{ClientInfo, 
JdkConfig, Platform, Project, WorkspaceSettings} diff --git a/frontend/src/main/scala/bloop/data/ClientInfo.scala b/frontend/src/main/scala/bloop/data/ClientInfo.scala index db6ba98eba..4db020a0ab 100644 --- a/frontend/src/main/scala/bloop/data/ClientInfo.scala +++ b/frontend/src/main/scala/bloop/data/ClientInfo.scala @@ -285,10 +285,10 @@ object ClientInfo { val attrs = Files.readAttributes(clientDir.underlying, classOf[BasicFileAttributes]) val isOldDir = attrs.creationTime.toInstant.isBefore(deletionThresholdInstant) - val isWhitelisted = CliClientInfo.isStableDirName(dirName) || + val isAllowed = CliClientInfo.isStableDirName(dirName) || connectedBspClientIds.exists(clientId => dirName.endsWith(s"-$clientId")) - if (isWhitelisted || !isOldDir) () + if (isAllowed || !isOldDir) () else { out.println(s"Deleting orphan directory ${clientDir}") bloop.io.Paths.delete(clientDir) diff --git a/frontend/src/main/scala/bloop/engine/Interpreter.scala b/frontend/src/main/scala/bloop/engine/Interpreter.scala index 3681caa6bc..94aa20bb78 100644 --- a/frontend/src/main/scala/bloop/engine/Interpreter.scala +++ b/frontend/src/main/scala/bloop/engine/Interpreter.scala @@ -1,6 +1,5 @@ package bloop.engine -import bloop.CompileMode import bloop.bsp.BspServer import bloop.cli._ import bloop.cli.completion.{Case, Mode} diff --git a/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala b/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala index 7651add37b..00bfdf5553 100644 --- a/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala +++ b/frontend/src/main/scala/bloop/engine/caches/LastSuccessfulResult.scala @@ -1,7 +1,6 @@ package bloop.engine.caches import bloop.Compiler -import bloop.CompilerOracle import bloop.CompileProducts import bloop.data.Project import bloop.io.AbsolutePath diff --git a/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala b/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala index 
a9a7895106..a339bc0583 100644 --- a/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala +++ b/frontend/src/main/scala/bloop/engine/caches/ResultsCache.scala @@ -217,9 +217,8 @@ object ResultsCache { val r = PreviousResult.of(Optional.of(res.getAnalysis), Optional.of(res.getMiniSetup)) res.getAnalysis.readCompilations.getAllCompilations.lastOption match { case Some(lastCompilation) => - lastCompilation.getOutput.getSingleOutput.toOption match { - case Some(classesDirFile) => - val classesDir = classesDirFile.toPath + lastCompilation.getOutput.getSingleOutputAsPath.toOption match { + case Some(classesDir) => val originPath = p.origin.path.syntax val originHash = p.origin.hash val inputs = UniqueCompileInputs.emptyFor(originPath) @@ -227,10 +226,8 @@ object ResultsCache { val dummy = ObservedLogger.dummy(logger, ExecutionContext.ioScheduler) val reporter = new LogReporter(p, dummy, cwd, ReporterConfig.defaultFormat) - // TODO: Figure out a way to populate macros from previous run after restart - val ms = new Array[String](0) val products = - CompileProducts(classesDir, classesDir, r, r, Set.empty, Map.empty, ms) + CompileProducts(classesDir, classesDir, r, r, Set.empty, Map.empty) val bundle = ResultBundle( Result.Success(inputs, reporter, products, 0L, dummyTasks, false, false), Some(LastSuccessfulResult(inputs, products, Task.now(()))), diff --git a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala index fc03d2b706..13353e84f2 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/CompileTask.scala @@ -16,7 +16,6 @@ import bloop.{ CompileBackgroundTasks, CompileExceptions, CompileInputs, - CompileMode, CompileOutPaths, CompileProducts, Compiler @@ -87,13 +86,6 @@ object CompileTask { bundle.prepareSourcesAndInstance match { case Left(earlyResultBundle) => - graphInputs.pipelineInputs match { - case None => () - case 
Some(inputs) => - inputs.irPromise.trySuccess(new Array(0)) - inputs.finishedCompilation.trySuccess(None) - inputs.completeJava.trySuccess(()) - } compileProjectTracer.terminate() Task.now(earlyResultBundle) case Right(CompileSourcesAndInstance(sources, instance, javaOnly)) => @@ -143,7 +135,6 @@ object CompileTask { previousResult, reporter, logger, - configuration.mode, graphInputs.dependentResults, cancelCompilation, compileProjectTracer, @@ -166,9 +157,6 @@ object CompileTask { waitOnReadClassesDir.flatMap { _ => // Only when the task is finished, we kickstart the compilation inputs.flatMap(inputs => Compiler.compile(inputs)).map { result => - // Post-compilation hook to complete/validate pipelining state - runPipeliningBookkeeping(graphInputs, result, javaOnly, logger) - def runPostCompilationTasks( backgroundTasks: CompileBackgroundTasks ): CancelableFuture[Unit] = { @@ -257,7 +245,7 @@ object CompileTask { } val client = state.client - CompileGraph.traverse(dag, client, store, setup(_), compile(_), pipeline).flatMap { pdag => + CompileGraph.traverse(dag, client, store, setup(_), compile(_)).flatMap { pdag => val partialResults = Dag.dfs(pdag) val finalResults = partialResults.map(r => PartialCompileResult.toFinalResult(r)) Task.gatherUnordered(finalResults).map(_.flatten).flatMap { results => @@ -328,61 +316,13 @@ object CompileTask { } } - case class ConfiguredCompilation(mode: CompileMode, scalacOptions: List[String]) + case class ConfiguredCompilation(scalacOptions: List[String]) private def configureCompilation( project: Project, graphInputs: CompileGraph.Inputs, out: CompileOutPaths ): ConfiguredCompilation = { - graphInputs.pipelineInputs match { - case Some(inputs) => - val scalacOptions = project.scalacOptions - val newMode = CompileMode.Pipelined( - inputs.completeJava, - inputs.finishedCompilation, - inputs.transitiveJavaSignal, - graphInputs.oracle, - inputs.separateJavaAndScala - ) - ConfiguredCompilation(newMode, scalacOptions) - case None => - 
val newMode = CompileMode.Sequential(graphInputs.oracle) - ConfiguredCompilation(newMode, project.scalacOptions) - } - } - - private def runPipeliningBookkeeping( - inputs: CompileGraph.Inputs, - result: Compiler.Result, - javaOnly: Boolean, - logger: Logger - ): Unit = { - val projectName = inputs.bundle.project.name - // Avoid deadlocks in case pipelining is disabled in the Zinc bridge - inputs.pipelineInputs match { - case None => () - case Some(pipelineInputs) => - result match { - case Compiler.Result.NotOk(_) => - // If error, try to set failure in IR promise; if already completed ignore - pipelineInputs.irPromise.tryFailure(CompileExceptions.FailedOrCancelledPromise); () - case result => - // Complete finished compilation promise with products if success or empty - result match { - case s: Compiler.Result.Success => - pipelineInputs.finishedCompilation.success(Some(s.products)) - case Compiler.Result.Empty => - pipelineInputs.finishedCompilation.trySuccess(None) - case _ => - pipelineInputs.finishedCompilation.tryFailure(CompileExceptions.CompletePromise) - } - - val completed = pipelineInputs.irPromise.tryFailure(CompileExceptions.CompletePromise) - if (completed && !javaOnly) { - logger.warn(s"The project $projectName didn't use pipelined compilation.") - } - } - } + ConfiguredCompilation(project.scalacOptions) } private def populateNewReadOnlyClassesDir( diff --git a/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala b/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala index 7114f7955a..a81fb93d82 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/Tasks.scala @@ -18,6 +18,8 @@ import sbt.internal.inc.classpath.ClasspathUtilities import sbt.testing._ import xsbti.compile.{ClasspathOptionsUtil, CompileAnalysis, MiniSetup, PreviousResult} import bloop.bsp.ScalaTestClasses +import sbt.internal.inc.PlainVirtualFileConverter +import sbt.internal.inc.classpath.ClasspathUtil object Tasks { 
private[bloop] val TestFailedStatus: Set[Status] = @@ -63,7 +65,8 @@ object Tasks { DebugFilter.All ) val javacBin = project.runtimeJdkConfig.flatMap(_.javacBin) - val loader = ClasspathUtilities.makeLoader(entries, instance) + val pathEntries = entries.map(e => e.toPath()) + val loader = ClasspathUtil.makeLoader(pathEntries, instance) val compiler = state.compilerCache .get(instance, javacBin, project.javacOptions) @@ -71,8 +74,16 @@ object Tasks { .asInstanceOf[AnalyzingCompiler] val opts = ClasspathOptionsUtil.repl val options = project.scalacOptions :+ "-Xnojline" + val converter = PlainVirtualFileConverter.converter // We should by all means add better error handling here! - compiler.console(entries, options, opts, "", "", state.logger)(Some(loader)) + compiler.console( + pathEntries.map(e => converter.toVirtualFile(e)), + converter, + options, + "", + "", + state.logger + )(Some(loader)) case None => logger.error(s"Missing Scala configuration on project '${project.name}'") } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala index d31ad3a2f2..d6b853e938 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileBundle.scala @@ -5,7 +5,7 @@ import bloop.engine.Feedback import bloop.engine.{Dag, ExecutionContext} import bloop.io.{AbsolutePath, Paths} import bloop.io.ByteHasher -import bloop.{Compiler, CompilerOracle, ScalaInstance} +import bloop.{Compiler, ScalaInstance} import bloop.logging.{Logger, ObservedLogger, LoggerAction} import bloop.reporter.{ObservedReporter, ReporterAction} import bloop.tracing.BraveTracer @@ -25,6 +25,7 @@ import xsbti.compile.PreviousResult import scala.concurrent.ExecutionContext import bloop.CompileOutPaths import bloop.cli.CommonOptions +import sbt.internal.inc.PlainVirtualFileConverter sealed trait CompileBundle @@ 
-62,8 +63,6 @@ case object CancelledCompileBundle extends CompileBundle * dependent projects, which is required to create a full classpath. * @param javaSources A list of Java sources in the project. * @param scalaSources A list of Scala sources in the project. - * @param oracleInputs The compiler oracle inputs are the main input to the - * compilation task called by [[CompileGraph]]. * @param cancelCompilation A promise that can be completed to cancel the compilation. * @param reporter A reporter instance that will register every reporter action * produced by the compilation started by this compile bundle. @@ -189,7 +188,7 @@ object CompileBundle { val sourceHashesTask = tracer.traceTaskVerbose("discovering and hashing sources") { _ => bloop.io.SourceHasher .findAndHashSourcesInProject(project, 20, cancelCompilation, ioScheduler) - .map(res => res.map(_.sortBy(_.source.syntax))) + .map(res => res.map(_.sortBy(_.source.id()))) .executeOn(ioScheduler) } @@ -205,7 +204,7 @@ object CompileBundle { val javaSources = new ListBuffer[AbsolutePath]() val scalaSources = new ListBuffer[AbsolutePath]() sourceHashes.foreach { hashed => - val source = hashed.source + val source = AbsolutePath(PlainVirtualFileConverter.converter.toPath(hashed.source)) val sourceName = source.underlying.getFileName().toString if (sourceName.endsWith(".scala")) { scalaSources += source diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala index c372fb9497..7585cb268e 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileDefinitions.scala @@ -5,12 +5,10 @@ import bloop.engine.Dag import bloop.PartialCompileProducts import bloop.CompileProducts import bloop.data.Project -import bloop.CompilerOracle import java.io.File import xsbti.compile.PreviousResult import 
scala.concurrent.Promise import bloop.JavaSignal -import xsbti.compile.Signature object CompileDefinitions { type ProjectId = String @@ -25,11 +23,4 @@ object CompileDefinitions { dependentProducts: Map[Project, BundleProducts] ) - case class PipelineInputs( - irPromise: Promise[Array[Signature]], - finishedCompilation: Promise[Option[CompileProducts]], - completeJava: Promise[Unit], - transitiveJavaSignal: Task[JavaSignal], - separateJavaAndScala: Boolean - ) } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala index c3b9494b2a..e13726936e 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala +++ b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileGraph.scala @@ -15,9 +15,8 @@ import bloop.util.SystemProperties import bloop.engine.{Dag, Leaf, Parent, Aggregate, ExecutionContext} import bloop.reporter.ReporterAction import bloop.logging.{Logger, ObservedLogger, LoggerAction, DebugFilter} -import bloop.{Compiler, CompilerOracle, JavaSignal, CompileProducts} +import bloop.{Compiler, JavaSignal, CompileProducts} import bloop.engine.caches.LastSuccessfulResult -import bloop.UniqueCompileInputs import bloop.PartialCompileProducts import bloop.engine.tasks.compilation.CompileDefinitions.CompileTraversal @@ -30,7 +29,6 @@ import xsbti.compile.PreviousResult import scala.concurrent.Promise import scala.util.{Failure, Success} -import xsbti.compile.Signature import scala.collection.mutable import java.{util => ju} import bloop.CompileOutPaths @@ -47,39 +45,14 @@ object CompileGraph { case class Inputs( bundle: SuccessfulCompileBundle, - oracle: CompilerOracle, - pipelineInputs: Option[PipelineInputs], dependentResults: Map[File, PreviousResult] ) - /** - * Turns a dag of projects into a task that returns a dag of compilation results - * that can then be used to debug the evaluation of the compilation within Monix - * 
and access the compilation results received from Zinc. - * - * @param dag The dag of projects to be compiled. - * @return A task that returns a dag of compilation results. - */ - def traverse( - dag: Dag[Project], - client: ClientInfo, - store: CompileClientStore, - setup: BundleInputs => Task[CompileBundle], - compile: Inputs => Task[ResultBundle], - pipeline: Boolean - ): CompileTraversal = { - /* We use different traversals for normal and pipeline compilation because the - * pipeline traversal has an small overhead (2-3%) for some projects. Check - * https://benchs.scala-lang.org/dashboard/snapshot/sLrZTBfntTxMWiXJPtIa4DIrmT0QebYF */ - if (pipeline) pipelineTraversal(dag, client, store, setup, compile) - else normalTraversal(dag, client, store, setup, compile) - } - private final val JavaContinue = Task.now(JavaSignal.ContinueCompilation) private def partialSuccess( bundle: SuccessfulCompileBundle, result: ResultBundle - ): PartialSuccess = PartialSuccess(bundle, None, Task.now(result)) + ): PartialSuccess = PartialSuccess(bundle, Task.now(result)) private def blockedBy(dag: Dag[PartialCompileResult]): Option[Project] = { def blockedFromResults(results: List[PartialCompileResult]): Option[Project] = { @@ -294,7 +267,7 @@ object CompileGraph { */ val obtainResultFromDeduplication = runningCompilationTask.map { results => PartialCompileResult.mapEveryResult(results) { - case s @ PartialSuccess(bundle, _, compilerResult) => + case s @ PartialSuccess(bundle, compilerResult) => val newCompilerResult = compilerResult.flatMap { results => results.fromCompiler match { case s: Compiler.Result.Success => @@ -404,14 +377,14 @@ object CompileGraph { import scala.collection.mutable /** - * Traverses the dag of projects in a normal way. + * Turns a dag of projects into a task that returns a dag of compilation results + * that can then be used to debug the evaluation of the compilation within Monix + * and access the compilation results received from Zinc. 
* - * @param dag is the dag of projects. - * @param computeBundle is the function that sets up the project on every node. - * @param compile is the task we use to compile on every node. + * @param dag The dag of projects to be compiled. * @return A task that returns a dag of compilation results. */ - private def normalTraversal( + def traverse( dag: Dag[Project], client: ClientInfo, store: CompileClientStore, @@ -444,8 +417,7 @@ object CompileGraph { case Leaf(project) => val bundleInputs = BundleInputs(project, dag, Map.empty) setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val oracle = new SimpleOracle - compile(Inputs(bundle, oracle, None, Map.empty)).map { results => + compile(Inputs(bundle, Map.empty)).map { results => results.fromCompiler match { case Compiler.Result.Ok(_) => Leaf(partialSuccess(bundle, results)) case _ => Leaf(toPartialFailure(bundle, results)) @@ -493,8 +465,7 @@ object CompileGraph { val resultsMap = dependentResults.toMap val bundleInputs = BundleInputs(project, dag, dependentProducts.toMap) setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val oracle = new SimpleOracle - val inputs = Inputs(bundle, oracle, None, resultsMap) + val inputs = Inputs(bundle, resultsMap) compile(inputs).map { results => results.fromCompiler match { case Compiler.Result.Ok(_) => @@ -514,233 +485,6 @@ object CompileGraph { loop(dag) } - /** - * Traverses the dag of projects in such a way that allows compilation pipelining. - * - * Note that to use build pipelining, the compilation task needs to have a pipelining - * implementation where the pickles are generated and the promise in [[Inputs]] completed. - * - * @param dag is the dag of projects. - * @param computeBundle is the function that sets up the project on every node. - * @param compile is the function that compiles every node, returning a Task. - * @return A task that returns a dag of compilation results. 
- */ - private def pipelineTraversal( - dag: Dag[Project], - client: ClientInfo, - store: CompileClientStore, - computeBundle: BundleInputs => Task[CompileBundle], - compile: Inputs => Task[ResultBundle] - ): CompileTraversal = { - val tasks = new scala.collection.mutable.HashMap[Dag[Project], CompileTraversal]() - def register(k: Dag[Project], v: CompileTraversal): CompileTraversal = { - val toCache = store.findPreviousTraversalOrAddNew(k, v).getOrElse(v) - tasks.put(k, toCache) - toCache - } - - def loop(dag: Dag[Project]): CompileTraversal = { - tasks.get(dag) match { - case Some(task) => task - case None => - val task = dag match { - case Leaf(project) => - Task.now(Promise[Array[Signature]]()).flatMap { cf => - val bundleInputs = BundleInputs(project, dag, Map.empty) - setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - val jcf = Promise[Unit]() - val end = Promise[Option[CompileProducts]]() - val noSigs = new Array[Signature](0) - val noDefinedMacros = Map.empty[Project, Array[String]] - val oracle = new PipeliningOracle(bundle, noSigs, noDefinedMacros, cf, Nil) - val pipelineInputs = PipelineInputs(cf, end, jcf, JavaContinue, true) - val t = compile(Inputs(bundle, oracle, Some(pipelineInputs), Map.empty)) - val running = - Task.fromFuture(t.executeWithFork.runAsync(ExecutionContext.scheduler)) - val completeJava = Task - .deferFuture(end.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { - case Success(_) => JavaSignal.ContinueCompilation - case Failure(_) => JavaSignal.FailFastCompilation(bundle.project.name) - } - .memoize - - Task - .deferFuture(cf.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { upstream => - val ms = oracle.collectDefinedMacroSymbols - Leaf( - PartialCompileResult(bundle, upstream, end, jcf, completeJava, ms, running) - ) - } - } - } - - case Aggregate(dags) => - val downstream = dags.map(loop) - Task.gatherUnordered(downstream).flatMap { dagResults => - 
Task.now(Parent(PartialEmpty, dagResults)) - } - - case Parent(project, dependencies) => - val downstream = dependencies.map(loop) - Task.gatherUnordered(downstream).flatMap { dagResults => - val failed = dagResults.flatMap(dag => blockedBy(dag).toList) - if (failed.nonEmpty) { - // Register the name of the projects we're blocked on (intransitively) - val blockedResult = Compiler.Result.Blocked(failed.map(_.name)) - val blocked = Task.now(ResultBundle(blockedResult, None, None)) - Task.now(Parent(PartialFailure(project, BlockURI, blocked), dagResults)) - } else { - val results: List[PartialSuccess] = { - val transitive = dagResults.flatMap(Dag.dfs(_)).distinct - transitive.collect { case s: PartialSuccess => s } - } - - val failedPipelineProjects = new mutable.ListBuffer[Project]() - val pipelinedJavaSignals = new mutable.ListBuffer[Task[JavaSignal]]() - val transitiveSignatures = new ju.LinkedHashMap[String, Signature]() - val resultsToBlockOn = new mutable.ListBuffer[Task[(Project, ResultBundle)]]() - val pipelinedDependentProducts = - new mutable.ListBuffer[(Project, BundleProducts)]() - - results.foreach { ps => - val project = ps.bundle.project - ps.pipeliningResults match { - case None => resultsToBlockOn.+=(ps.result.map(r => project -> r)) - case Some(results) => - pipelinedJavaSignals.+=(results.shouldAttemptJavaCompilation) - val signatures = results.signatures - signatures.foreach { signature => - // Don't register if sig for name exists, signature lookup order is DFS - if (!transitiveSignatures.containsKey(signature.name())) - transitiveSignatures.put(signature.name(), signature) - } - - val products = results.productsWhenCompilationIsFinished - val result = products.future.value match { - case Some(Success(products)) => - products match { - case Some(products) => - // Add finished compile products when compilation is finished - pipelinedDependentProducts.+=(project -> Right(products)) - case None => () - } - case Some(Failure(t)) => - // Log if error 
when computing pipelining results and add to failure - ps.bundle.logger.trace(t) - failedPipelineProjects.+=(project) - case None => - val out = ps.bundle.out - val pipeliningResult = Left( - PartialCompileProducts( - out.internalReadOnlyClassesDir, - out.internalNewClassesDir, - results.definedMacros - ) - ) - pipelinedDependentProducts.+=(project -> pipeliningResult) - } - } - } - - if (failedPipelineProjects.nonEmpty) { - // If any project failed to pipeline, abort compilation with blocked result - val failed = failedPipelineProjects.toList - val blockedResult = Compiler.Result.Blocked(failed.map(_.name)) - val blocked = Task.now(ResultBundle(blockedResult, None, None)) - Task.now(Parent(PartialFailure(project, BlockURI, blocked), dagResults)) - } else { - // Get the compilation result of those projects which were not pipelined - Task.gatherUnordered(resultsToBlockOn.toList).flatMap { nonPipelineResults => - var nonPipelinedDependentProducts = - new mutable.ListBuffer[(Project, BundleProducts)]() - var nonPipelinedDependentResults = - new mutable.ListBuffer[(File, PreviousResult)]() - nonPipelineResults.foreach { - case (p, ResultBundle(s: Compiler.Result.Success, _, _, _)) => - val newProducts = s.products - nonPipelinedDependentProducts.+=(p -> Right(newProducts)) - val newResult = newProducts.resultForDependentCompilationsInSameRun - nonPipelinedDependentResults - .+=(newProducts.newClassesDir.toFile -> newResult) - .+=(newProducts.readOnlyClassesDir.toFile -> newResult) - case _ => () - } - - val projectResultsMap = - (pipelinedDependentProducts.iterator ++ nonPipelinedDependentProducts.iterator).toMap - val allMacros = projectResultsMap - .mapValues(_.fold(_.definedMacroSymbols, _.definedMacroSymbols)) - val allSignatures = { - import scala.collection.JavaConverters._ - // Order of signatures matters (e.g. 
simulates classpath lookup) - transitiveSignatures.values().iterator().asScala.toArray - } - - val bundleInputs = BundleInputs(project, dag, projectResultsMap) - setupAndDeduplicate(client, bundleInputs, computeBundle) { bundle => - // Signals whether java compilation can proceed or not - val javaSignals = aggregateJavaSignals(pipelinedJavaSignals.toList) - Task.now(Promise[Array[Signature]]()).flatMap { cf => - val jf = Promise[Unit]() - val end = Promise[Option[CompileProducts]]() - val oracle = - new PipeliningOracle(bundle, allSignatures, allMacros, cf, results) - val pipelineInputs = PipelineInputs(cf, end, jf, javaSignals, true) - val t = compile( - Inputs( - bundle, - oracle, - Some(pipelineInputs), - // Pass incremental results for only those projects that were not pipelined - nonPipelinedDependentResults.toMap - ) - ) - - val running = t.executeWithFork.runAsync(ExecutionContext.scheduler) - val ongoing = Task.fromFuture(running) - val cj = { - Task - .deferFuture(end.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { - case Success(_) => JavaSignal.ContinueCompilation - case Failure(_) => JavaSignal.FailFastCompilation(project.name) - } - }.memoize // Important to memoize this task for performance reasons - - Task - .deferFuture(cf.future) - .executeOn(ExecutionContext.ioScheduler) - .materialize - .map { upstream => - val ms = oracle.collectDefinedMacroSymbols - Parent( - PartialCompileResult(bundle, upstream, end, jf, cj, ms, ongoing), - dagResults - ) - } - } - } - } - } - } - } - } - - register(dag, task.memoize) - } - } - - loop(dag) - } - private def aggregateJavaSignals(xs: List[Task[JavaSignal]]): Task[JavaSignal] = { Task .gatherUnordered(xs) diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala index dc463c6d89..ef95a0550c 100644 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala +++ 
b/frontend/src/main/scala/bloop/engine/tasks/compilation/CompileResult.scala @@ -11,7 +11,6 @@ import monix.execution.CancelableFuture import scala.util.Try import scala.concurrent.Promise -import xsbti.compile.Signature sealed trait CompileResult[+R] { def result: R @@ -22,31 +21,6 @@ sealed trait PartialCompileResult extends CompileResult[Task[ResultBundle]] { } object PartialCompileResult { - def apply( - bundle: SuccessfulCompileBundle, - pipelineAttempt: Try[Array[Signature]], - futureProducts: Promise[Option[CompileProducts]], - hasJavacCompleted: Promise[Unit], - shouldCompileJava: Task[JavaSignal], - definedMacroSymbols: Array[String], - result: Task[ResultBundle] - ): PartialCompileResult = { - pipelineAttempt match { - case scala.util.Success(sigs) => - val pipeline = PipelineResults( - sigs, - definedMacroSymbols, - futureProducts, - hasJavacCompleted, - shouldCompileJava - ) - PartialSuccess(bundle, Some(pipeline), result) - case scala.util.Failure(CompileExceptions.CompletePromise) => - PartialSuccess(bundle, None, result) - case scala.util.Failure(t) => - PartialFailure(bundle.project, t, result) - } - } def mapEveryResult( results: Dag[PartialCompileResult] @@ -71,7 +45,7 @@ object PartialCompileResult { bundle.map(b => FinalNormalCompileResult(project, b) :: Nil) case PartialFailures(failures, _) => Task.gatherUnordered(failures.map(toFinalResult(_))).map(_.flatten) - case PartialSuccess(bundle, _, result) => + case PartialSuccess(bundle, result) => result.map(res => FinalNormalCompileResult(bundle.project, res) :: Nil) } } @@ -97,19 +71,10 @@ case class PartialFailures( case class PartialSuccess( bundle: SuccessfulCompileBundle, - pipeliningResults: Option[PipelineResults], result: Task[ResultBundle] ) extends PartialCompileResult with CacheHashCode -case class PipelineResults( - signatures: Array[Signature], - definedMacros: Array[String], - productsWhenCompilationIsFinished: Promise[Option[CompileProducts]], - isJavaCompilationFinished: 
Promise[Unit], - shouldAttemptJavaCompilation: Task[JavaSignal] -) - sealed trait FinalCompileResult extends CompileResult[ResultBundle] { def result: ResultBundle } diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala deleted file mode 100644 index 44251aa037..0000000000 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/PipeliningOracle.scala +++ /dev/null @@ -1,150 +0,0 @@ -package bloop.engine.tasks.compilation - -import java.io.File - -import bloop.data.Project -import bloop.{Compiler, CompilerOracle} -import bloop.engine.ExecutionContext -import bloop.io.AbsolutePath -import bloop.ScalaSig -import bloop.logging.Logger -import bloop.tracing.BraveTracer - -import scala.concurrent.Promise -import scala.collection.mutable - -import monix.eval.Task -import xsbti.compile.Signature -import monix.execution.atomic.AtomicBoolean -import scala.concurrent.Await -import scala.concurrent.duration.Duration -import monix.execution.misc.NonFatal - -/** @inheritdoc */ -final class PipeliningOracle( - bundle: CompileBundle, - signaturesFromRunningCompilations: Array[Signature], - definedMacrosFromRunningCompilations: Map[Project, Array[String]], - startDownstreamCompilation: Promise[Array[Signature]], - scheduledCompilations: List[PartialSuccess] -) extends CompilerOracle { - - /** @inheritdoc */ - override def askForJavaSourcesOfIncompleteCompilations: List[File] = { - scheduledCompilations.flatMap { r => - r.pipeliningResults match { - case None => Nil - case Some(results) => - if (results.isJavaCompilationFinished.isCompleted) Nil - else r.bundle.javaSources.map(_.toFile) - } - } - } - - private val definedMacros = new mutable.HashSet[String]() - - /** @inheritdoc */ - def registerDefinedMacro(definedMacroSymbol: String): Unit = definedMacros.+=(definedMacroSymbol) - - /** @inheritdoc */ - def collectDefinedMacroSymbols: Array[String] = 
definedMacros.toArray - - /** @inheritdoc */ - @volatile private var requiresMacroInitialization: Boolean = false - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit = { - if (requiresMacroInitialization) () - else { - val noMacrosDefinedInDependentProjects = { - definedMacrosFromRunningCompilations.isEmpty || - definedMacrosFromRunningCompilations.forall(_._2.isEmpty) - } - - if (noMacrosDefinedInDependentProjects) { - requiresMacroInitialization = true - } else { - // Only return promises for those projects that define any macros - val dependentProjectPromises = scheduledCompilations.flatMap { r => - r.pipeliningResults match { - case None => Nil - case Some(results) => - val hasNoMacros = { - val macros = definedMacrosFromRunningCompilations.get(r.bundle.project) - macros.isEmpty || macros.exists(_.isEmpty) - } - if (hasNoMacros) Nil - else List(Task.deferFuture(results.productsWhenCompilationIsFinished.future)) - } - } - - val waitDownstreamFullCompilations = { - Task - .sequence(dependentProjectPromises) - .map(_ => ()) - .runAsync(ExecutionContext.ioScheduler) - } - - /** - * Block until all the downstream compilations have completed. - * - * We have a guarantee from bloop that these promises will be always - * completed even if their associated compilations fail or are - * cancelled. In any of this scenario, and even if we throw on this - * wait, we catch it and let the compiler logic handle it. If the user - * has cancelled this compilation as well, the compiler logic will - * exit. If the compilation downstream failed, this compilation will - * fail too because supposedly it accesses macros defined downstream. - * Failing here it's fine. 
- */ - try Await.result(waitDownstreamFullCompilations, Duration.Inf) - catch { case NonFatal(e) => () } - finally { - requiresMacroInitialization = true - } - } - } - } - - /** @inheritdoc */ - def isPipeliningEnabled: Boolean = !startDownstreamCompilation.isCompleted - - /** @inheritdoc */ - def startDownstreamCompilations(signatures: Array[Signature]): Unit = { - startDownstreamCompilation.success(signatures) - } - - /** @inheritdoc */ - def collectDownstreamSignatures(): Array[Signature] = signaturesFromRunningCompilations -} - -object PipeliningOracle { - - /** - * Persists in-memory signatures to a pickles directory associated with the - * target that producted them. - * - * For the moment, this logic is unused in favor of an in-memory populating - * strategy via the analysis callback endpoint `downstreamSignatures`. - */ - def writeSignaturesToPicklesDir( - picklesDir: AbsolutePath, - signatures: List[Signature], - startDownstreamCompilation: Promise[Unit], - tracer: BraveTracer, - logger: Logger - ): Unit = { - val writePickles = signatures.map(ScalaSig.write(picklesDir, _, logger)) - val groupTasks = writePickles.grouped(4).map(group => Task.gatherUnordered(group)).toList - val persistPicklesInParallel = { - tracer.traceTask("writing pickles") { _ => - Task.sequence(groupTasks).doOnFinish { - case None => Task.now { startDownstreamCompilation.trySuccess(()); () } - case Some(t) => Task.now { startDownstreamCompilation.tryFailure(t); () } - } - } - } - - // Think strategies to get a hold of this future or cancel it if compilation is cancelled - persistPicklesInParallel.runAsync(ExecutionContext.ioScheduler) - () - } -} diff --git a/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala b/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala deleted file mode 100644 index 789d0b6f21..0000000000 --- a/frontend/src/main/scala/bloop/engine/tasks/compilation/SimpleOracle.scala +++ /dev/null @@ -1,20 +0,0 @@ -package 
bloop.engine.tasks.compilation - -import bloop.CompilerOracle -import java.io.File -import bloop.ScalaSig -import bloop.io.AbsolutePath -import xsbti.compile.Signature -import scala.collection.mutable - -final class SimpleOracle extends CompilerOracle { - def blockUntilMacroClasspathIsReady(usedMacroSymbol: String): Unit = () - def askForJavaSourcesOfIncompleteCompilations: List[File] = Nil - def isPipeliningEnabled: Boolean = false - def collectDownstreamSignatures: Array[Signature] = new Array[Signature](0) - def startDownstreamCompilations(sigs: Array[Signature]): Unit = () - - private val definedMacros = new mutable.HashSet[String]() - def registerDefinedMacro(definedMacroSymbol: String): Unit = definedMacros.+=(definedMacroSymbol) - def collectDefinedMacroSymbols: Array[String] = definedMacros.toArray -} diff --git a/frontend/src/main/scala/bloop/io/SourceHasher.scala b/frontend/src/main/scala/bloop/io/SourceHasher.scala index 0a6235a335..0f2e655977 100644 --- a/frontend/src/main/scala/bloop/io/SourceHasher.scala +++ b/frontend/src/main/scala/bloop/io/SourceHasher.scala @@ -19,7 +19,6 @@ import scala.collection.mutable import scala.concurrent.Promise import bloop.data.Project -import bloop.CompilerOracle import bloop.engine.ExecutionContext import bloop.util.monix.FoldLeftAsyncConsumer import bloop.UniqueCompileInputs.HashedSource @@ -32,6 +31,8 @@ import monix.reactive.internal.operators.MapAsyncParallelObservable import monix.execution.Cancelable import monix.execution.cancelables.CompositeCancelable +import sbt.internal.inc.PlainVirtualFileConverter + object SourceHasher { private final val sourceMatcher = FileSystems.getDefault.getPathMatcher("glob:**/[!.]*.{scala,java}") @@ -142,7 +143,7 @@ object SourceHasher { val hashSourcesInParallel = observable.mapAsync(parallelUnits) { (source: Path) => Task.eval { val hash = ByteHasher.hashFileContents(source.toFile) - HashedSource(AbsolutePath(source), hash) + 
HashedSource(PlainVirtualFileConverter.converter.toVirtualFile(source), hash) } } diff --git a/frontend/src/main/scala/bloop/reporter/BspProjectReporter.scala b/frontend/src/main/scala/bloop/reporter/BspProjectReporter.scala index bf5a51739c..a887a0889e 100644 --- a/frontend/src/main/scala/bloop/reporter/BspProjectReporter.scala +++ b/frontend/src/main/scala/bloop/reporter/BspProjectReporter.scala @@ -17,6 +17,7 @@ import scala.util.Try import scala.concurrent.Promise import bloop.CompileOutPaths import monix.execution.atomic.AtomicInt +import xsbti.VirtualFile final class BspProjectReporter( val project: Project, @@ -163,7 +164,11 @@ final class BspProjectReporter( ) } - override def reportStartIncrementalCycle(sources: Seq[File], outputDirs: Seq[File]): Unit = { + override def reportStartIncrementalCycle( + sources: Seq[VirtualFile], + outputDirs: Seq[File] + ): Unit = { + val plainFiles = sources.map(converter.toPath(_).toFile()) cycleCount.incrementAndGet() statusForNextEndCycle match { @@ -174,11 +179,11 @@ final class BspProjectReporter( case None => () } - val msg = Reporter.compilationMsgFor(project.name, sources) + val msg = Reporter.compilationMsgFor(project.name, plainFiles) logger.publishCompilationStart( CompilationEvent.StartCompilation(project.name, project.bspUri, msg, taskId) ) - compilingFiles ++ sources + compilingFiles ++ plainFiles } private def clearProblemsAtPhase( diff --git a/frontend/src/main/scala/bloop/reporter/LogReporter.scala b/frontend/src/main/scala/bloop/reporter/LogReporter.scala index 2b5be857da..d838ac256c 100644 --- a/frontend/src/main/scala/bloop/reporter/LogReporter.scala +++ b/frontend/src/main/scala/bloop/reporter/LogReporter.scala @@ -14,6 +14,8 @@ import sbt.util.InterfaceUtil import scala.collection.mutable import scala.collection.concurrent.TrieMap import bloop.logging.CompilationEvent +import xsbti.VirtualFile +import bloop.util.AnalysisUtils final class LogReporter( val project: Project, @@ -64,11 +66,15 @@ final 
class LogReporter( () } - override def reportStartIncrementalCycle(sources: Seq[File], outputDirs: Seq[File]): Unit = { + override def reportStartIncrementalCycle( + sources: Seq[VirtualFile], + outputDirs: Seq[File] + ): Unit = { // TODO(jvican): Fix https://github.com/scalacenter/bloop/issues/386 here require(sources.size > 0) // This is an invariant enforced in the call-site - compilingFiles ++ sources - logger.info(Reporter.compilationMsgFor(project.name, sources)) + val plainFiles = sources.map(converter.toPath(_).toFile()) + compilingFiles ++ plainFiles + logger.info(Reporter.compilationMsgFor(project.name, plainFiles)) } override def reportEndIncrementalCycle(durationMs: Long, result: scala.util.Try[Unit]): Unit = { diff --git a/frontend/src/test/scala/bloop/BaseCompileSpec.scala b/frontend/src/test/scala/bloop/BaseCompileSpec.scala index 03b220243f..1c3e2fa8a4 100644 --- a/frontend/src/test/scala/bloop/BaseCompileSpec.scala +++ b/frontend/src/test/scala/bloop/BaseCompileSpec.scala @@ -815,10 +815,14 @@ abstract class BaseCompileSpec extends bloop.testing.BaseSuite { | cannot find symbol | symbol: class Bar | location: class Foo + | L3: Bar + | ^^^ |[E1] ${targetFoo}:3 | cannot find symbol | symbol: class Bar | location: class Foo + | L3: Bar + | ^^^ |${targetFoo}: L3 [E1], L3 [E2] |""".stripMargin } else { @@ -1065,29 +1069,17 @@ abstract class BaseCompileSpec extends bloop.testing.BaseSuite { s"""[E1] ${targetB}:1 | cannot find symbol | symbol: class A - |${targetB}: L1 [E1]""".stripMargin - } - - val cannotFindSymbolError2: String = { - s"""[E1] ${targetB}:1 - | error: cannot find symbol + | L1: A + | ^ |${targetB}: L1 [E1]""".stripMargin } assertDiagnosticsResult(compiledState.getLastResultFor(`A`), 1) import bloop.testing.DiffAssertions - try { - assertNoDiff( - logger.renderErrors(exceptContaining = "Failed to compile"), - cannotFindSymbolError - ) - } catch { - case _: DiffAssertions.TestFailedException => - assertNoDiff( - 
logger.renderErrors(exceptContaining = "Failed to compile"), - cannotFindSymbolError2 - ) - } + assertNoDiff( + logger.renderErrors(exceptContaining = "Failed to compile"), + cannotFindSymbolError + ) } } diff --git a/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala b/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala deleted file mode 100644 index f1b66306ce..0000000000 --- a/frontend/src/test/scala/bloop/BuildPipeliningSpec.scala +++ /dev/null @@ -1,159 +0,0 @@ -package bloop - -import bloop.io.{AbsolutePath, RelativePath, Paths => BloopPaths} -import bloop.io.Environment.lineSeparator -import bloop.logging.RecordingLogger -import bloop.cli.{Commands, ExitStatus} -import bloop.engine.{Feedback, Run, State} -import bloop.util.{TestProject, TestUtil} - -import java.nio.file.Files -import java.util.concurrent.TimeUnit - -import scala.concurrent.Await -import scala.concurrent.duration.Duration -import scala.concurrent.duration.FiniteDuration -import bloop.engine.ExecutionContext - -object BuildPipeliningSpec extends bloop.testing.BaseSuite { - test("compile simple build") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |package a - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |package b - |class B extends a.A - """.stripMargin - val `C.scala` = - """/C.scala - |package c - |class C extends b.B - """.stripMargin - val `D.scala` = - """/D.scala - |package d - |class D extends c.C - """.stripMargin - val `E.scala` = - """/E.scala - |package e - |class E extends d.D - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val `A` = TestProject(workspace, "a", List(Sources.`A.scala`)) - val `B` = TestProject(workspace, "b", List(Sources.`B.scala`), List(`A`)) - val `C` = TestProject(workspace, "c", List(Sources.`C.scala`), List(`B`)) - val `D` = TestProject(workspace, "d", List(Sources.`D.scala`), List(`C`)) - val `E` = TestProject(workspace, "e", 
List(Sources.`E.scala`), List(`D`)) - val projects = List(`A`, `B`, `C`, `D`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`D`) - assert(compiledState.status == ExitStatus.Ok) - assertValidCompilationState(compiledState, projects) - } - } - - testOnlyOnJava8("compile simple build using Scala 2.10 (without pipelining)") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |class B extends A - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val jars = { - ScalaInstance - .resolve("org.scala-lang", "scala-compiler", "2.10.7", logger)( - ExecutionContext.ioScheduler - ) - .allJars - .map(AbsolutePath(_)) - } - - val scalaV = Some("2.10.7") - val `A` = TestProject( - workspace, - "a", - List(Sources.`A.scala`), - scalaVersion = scalaV, - jars = jars - ) - - val `B` = TestProject( - workspace, - "b", - List(Sources.`B.scala`), - List(`A`), - scalaVersion = scalaV, - jars = jars - ) - - val projects = List(`A`, `B`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`B`) - assert(compiledState.status == ExitStatus.Ok) - assertValidCompilationState(compiledState, projects) - - assertNoDiff( - logger.renderTimeInsensitiveInfos, - """|Compiling a (1 Scala source) - |Compiled a ??? - |Compiling b (1 Scala source) - |Compiled b ??? - |""".stripMargin - ) - - assertNoDiff( - logger.warnings.mkString(lineSeparator), - """|The project a didn't use pipelined compilation. - |The project b didn't use pipelined compilation. 
- |""".stripMargin - ) - } - } - - test("pipelining makes Java wait on upstream Scala compiles") { - TestUtil.withinWorkspace { workspace => - object Sources { - val `A.scala` = - """/A.scala - |class A - """.stripMargin - val `B.scala` = - """/B.scala - |class B extends A - """.stripMargin - val `C.java` = - """/C.java - |public class C extends B {} - """.stripMargin - } - - val logger = new RecordingLogger(ansiCodesSupported = false) - val `A` = TestProject(workspace, "a", List(Sources.`A.scala`, Sources.`B.scala`)) - // A project in the middle of the dependency graph with no sources - val `B` = TestProject(workspace, "b", Nil, List(`A`)) - val `C` = TestProject(workspace, "c", List(Sources.`C.java`), List(`B`)) - - val projects = List(`A`, `B`, `C`) - val state = loadState(workspace, projects, logger) - val compiledState = state.compileWithPipelining(`C`) - assert(compiledState.status == ExitStatus.Ok) - // Only check valid state in `A` and `C` because `B` is empty! - assertValidCompilationState(compiledState, List(`A`, `C`)) - } - } -} diff --git a/frontend/src/test/scala/bloop/io/SourcesGlobsSpec.scala b/frontend/src/test/scala/bloop/io/SourcesGlobsSpec.scala index df14b4a147..44a1364bd0 100644 --- a/frontend/src/test/scala/bloop/io/SourcesGlobsSpec.scala +++ b/frontend/src/test/scala/bloop/io/SourcesGlobsSpec.scala @@ -12,6 +12,7 @@ import java.util.concurrent.TimeUnit import bloop.cli.ExitStatus import bloop.config.Config import bloop.data.SourcesGlobs +import sbt.internal.inc.PlainVirtualFileConverter object SourcesGlobsSpec extends bloop.testing.BaseSuite { @@ -58,13 +59,14 @@ object SourcesGlobsSpec extends bloop.testing.BaseSuite { val Right(result) = TestUtil.await(10, TimeUnit.SECONDS)(hashedSources) import scala.collection.JavaConverters._ val obtainedFilenames = result - .map( - _.source + .map { file => + val path = AbsolutePath(PlainVirtualFileConverter.converter.toPath(file.source)) + path .toRelative(globDirectory) .toUri(isDirectory = false) 
.toString() .stripPrefix("globs/src/") - ) + } .sorted .mkString("\n") assertNoDiff(obtainedFilenames, expectedFilenames) diff --git a/integrations/maven-bloop/src/test/scala/bloop/integrations/maven/MavenConfigGenerationSuite.scala b/integrations/maven-bloop/src/test/scala/bloop/integrations/maven/MavenConfigGenerationSuite.scala index 6a6db149fb..de9cae650c 100644 --- a/integrations/maven-bloop/src/test/scala/bloop/integrations/maven/MavenConfigGenerationSuite.scala +++ b/integrations/maven-bloop/src/test/scala/bloop/integrations/maven/MavenConfigGenerationSuite.scala @@ -226,6 +226,7 @@ class MavenConfigGenerationSuite extends BaseConfigSuite { } checking(configFile, projectName, subProjects) tempDir.toFile().delete() + () } catch { case NonFatal(e) => println("Maven output:\n" + result) @@ -248,7 +249,7 @@ class MavenConfigGenerationSuite extends BaseConfigSuite { private def exec(cmd: Seq[String], cwd: File): Try[String] = { Try { val lastError = new StringBuilder - val swallowStderr = ProcessLogger(_ => (), err => lastError.append(err)) + val swallowStderr = ProcessLogger(_ => (), err => { lastError.append(err); () }) val processBuilder = new ProcessBuilder() val out = new StringBuilder() processBuilder.directory(cwd) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 8970fc8c63..ebb3712af0 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -14,7 +14,7 @@ object Dependencies { val nailgunCommit = "a2520c1e" // Keep in sync in BloopComponentCompiler - val zincVersion = "1.3.0-M4+47-d881fa2f" + val zincVersion = "1.6.0" val bspVersion = "2.0.0-M13" val javaDebugVersion = "0.21.0+1-7f1080f1" @@ -58,7 +58,7 @@ object Dependencies { val debugAdapterVersion = "2.0.13" import sbt.librarymanagement.syntax.stringToOrganization - val zinc = "ch.epfl.scala" %% "zinc" % zincVersion + val zinc = "org.scala-sbt" %% "zinc" % zincVersion val bsp4s = "ch.epfl.scala" %% "bsp4s" % bspVersion val bsp4j = "ch.epfl.scala" % "bsp4j" 
% bspVersion val nailgun = "ch.epfl.scala" % "nailgun-server" % nailgunVersion diff --git a/zinc b/zinc deleted file mode 160000 index d881fa2feb..0000000000 --- a/zinc +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d881fa2feb68b74c9c5afd1b8f62935a4e52b299