From 59cdc98205b12f68c318bc4cdaffe524e0e0aea7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 17 Apr 2018 11:35:53 +0100 Subject: [PATCH 01/66] Emit detailed compiler trace under -Yprofile-trace Suitable for viewing directly in chrome://tracing, or post processing with https://github.com/retronym/chrome-trace-to-flamegraph Co-Authored-By: Mike Skells --- project/ScriptCommands.scala | 4 +- src/compiler/scala/tools/nsc/Global.scala | 7 +- .../scala/tools/nsc/profile/Profiler.scala | 251 ++++++++++++---- .../tools/nsc/profile/ThreadPoolFactory.scala | 4 +- .../tools/nsc/settings/ScalaSettings.scala | 4 +- .../tools/nsc/symtab/SymbolLoaders.scala | 46 ++- .../tools/nsc/typechecker/Implicits.scala | 9 + .../scala/tools/nsc/typechecker/Macros.scala | 10 +- .../scala/tools/nsc/typechecker/Typers.scala | 267 ++++++++++-------- .../scala/reflect/internal/SymbolTable.scala | 4 + .../reflect/internal/util/ChromeTrace.scala | 189 +++++++++++++ .../reflect/internal/util/FileUtils.scala | 199 +++++++++++++ .../reflect/internal/util/FileUtilsTest.scala | 89 ++++++ 13 files changed, 878 insertions(+), 205 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/ChromeTrace.scala create mode 100644 src/reflect/scala/reflect/internal/util/FileUtils.scala create mode 100644 test/junit/scala/reflect/internal/util/FileUtilsTest.scala diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index a5564242ebf..4a4003066a7 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -109,11 +109,11 @@ object ScriptCommands { Project.setProject(session, newStructure, state) } - private[this] val enableOptimizer = Seq( + val enableOptimizer = Seq( scalacOptions in Compile in ThisBuild ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) - private[this] val noDocs = Seq( + val noDocs = Seq( publishArtifact in (Compile, packageDoc) in ThisBuild := false ) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 79358c172df..f6d89af3b52 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -445,8 +445,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) currentRun.informUnitStarting(this, unit) val unit0 = currentUnit currentRun.currentUnit = unit + currentRun.profiler.beforeUnit(phase, unit.source.file) try apply(unit) finally { + currentRun.profiler.afterUnit(phase, unit.source.file) currentRun.currentUnit = unit0 currentRun.advanceUnit() } @@ -1107,6 +1109,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit) + override protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.beforeCompletion(root, associatedFile) + override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.afterCompletion(root, associatedFile) + /** A Run is a single execution of the compiler on a set of units. 
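+   *
+   *  A minimal usage sketch for the tracing added in this patch (the trace file name is
+   *  illustrative):
+   *  {{{
+   *  scalac -Yprofile-trace profile.trace Foo.scala
+   *  }}}
+   *  Each event is a JSON object of the shape written by `ChromeTrace` below, e.g.
+   *  `{"cat":"scalac","name":"typer","ph":"X","tid":"00001","pid":"12345","ts":0,"dur":42}`
+   *  (values illustrative); the resulting file loads directly in chrome://tracing.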
*/ class Run extends RunContextApi with RunReporting with RunParsing { @@ -1455,7 +1460,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private final val GlobalPhaseName = "global (synthetic)" protected final val totalCompileTime = statistics.newTimer("#total compile time", GlobalPhaseName) - def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) + def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { units foreach addUnit reporter.reset() diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 87654e8e8ba..68cfab2f16e 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -14,31 +14,41 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory +import java.nio.file.{Files, Paths} import java.util.ServiceLoader import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger + import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} -import scala.tools.nsc.{Phase, Settings} +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.util.ChromeTrace +import scala.reflect.io.{AbstractFile, File} +import scala.tools.nsc.{Global, Phase, Settings} object Profiler { def apply(settings: Settings):Profiler = if (!settings.YprofileEnabled) NoOpProfiler else { - val reporter = if(settings.YprofileDestination.isSetByUser) - new StreamProfileReporter(new PrintWriter(new FileWriter(settings.YprofileDestination.value, true))) - else ConsoleProfileReporter + val reporter = settings.YprofileDestination.value match { + case _ if !settings.YprofileDestination.isSetByUser => NoOpProfileReporter + case "-" => ConsoleProfileReporter + case path => new StreamProfileReporter(new PrintWriter(new FileWriter(path, true))) + } new RealProfiler(reporter, settings) } - private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) +} +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long) { + val endNanos = System.nanoTime() } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, - idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, + allocatedBytes:Long, heapBytes:Long, totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long) = { copy(heapBytes = heapBytes) } @@ -73,13 +83,29 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB = toMegaBytes(end.heapBytes - start.heapBytes) } -sealed trait Profiler { +sealed abstract class Profiler { def finished(): Unit def beforePhase(phase: Phase): ProfileSnap def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + + def beforeUnit(phase: Phase, file: 
AbstractFile): Unit + + def afterUnit(phase: Phase, file: AbstractFile): Unit + + def beforeTypedImplDef(sym: Global#Symbol): Unit = () + def afterTypedImplDef(sym: Global#Symbol): Unit = () + + def beforeImplicitSearch(pt: Global#Type): Unit = () + def afterImplicitSearch(pt: Global#Type): Unit = () + + def beforeMacroExpansion(macroSym: Global#Symbol): Unit = () + def afterMacroExpansion(macroSym: Global#Symbol): Unit = () + + def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () + def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () } private [profile] object NoOpProfiler extends Profiler { @@ -87,6 +113,8 @@ private [profile] object NoOpProfiler extends Profiler { override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () + override def beforeUnit(phase: Phase, file: AbstractFile): Unit = () + override def afterUnit(phase: Phase, file: AbstractFile): Unit = () override def finished(): Unit = () } private [profile] object RealProfiler { @@ -99,17 +127,55 @@ private [profile] object RealProfiler { val threadMx = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() + lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator().asScala.toList + + private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { + val current = Thread.currentThread() + val allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId) + ProfileSnap( + threadId = current.getId, + threadName = current.getName, + snapTimeNanos = System.nanoTime(), + idleTimeNanos = idleTimeNanos, + cpuTimeNanos = threadMx.getCurrentThreadCpuTime, + userTimeNanos = threadMx.getCurrentThreadUserTime, + allocatedBytes = allocatedBytes, + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime + ) + } + private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed } private [profile] class RealProfiler(reporter : ProfileReporter, val settings: Settings) extends Profiler with NotificationListener { + private val mainThread = Thread.currentThread() + val id = RealProfiler.idGen.incrementAndGet() + object Category { + final val Run = "run" + final val Phase = "phase" + final val File = "file" + final val TypeCheck = "typecheck" + final val Implicit = "implicit" + final val Macro = "macro" + final val Completion = "completion" + } + + private val chromeTrace = { + if (settings.YprofileTrace.isSetByUser) + new ChromeTrace(Paths.get(settings.YprofileTrace.value)) + else null + } + if (chromeTrace != null) + chromeTrace.traceDurationEventStart(Category.Run, "scalac-" + id) + def completeBackground(threadRange: ProfileRange): Unit = { reporter.reportBackground(this, threadRange) } def outDir = settings.outputDirs.getSingleOutput.getOrElse(settings.outputDirs.outputs.head._2.file).toString - val id = RealProfiler.idGen.incrementAndGet() RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.addNotificationListener(this, null, null) case gc => println(s"Cant connect gcListener to ${gc.getClass}") @@ -117,25 +183,6 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val active = RealProfiler.allPlugins map (_.generate(this, settings)) - private val mainThread = Thread.currentThread() - - private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { - import 
RealProfiler._ - val current = Thread.currentThread() - - ProfileSnap( - threadId = current.getId, - threadName = current.getName, - snapTimeNanos = System.nanoTime(), - idleTimeNanos = idleTimeNanos, - cpuTimeNanos = threadMx.getCurrentThreadCpuTime, - userTimeNanos = threadMx.getCurrentThreadUserTime, - allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() - ) - } - private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed - private def doGC: Unit = { System.gc() System.runFinalization() @@ -151,8 +198,19 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S case gc => } reporter.close(this) + if (chromeTrace != null) { + for (gcEvent <- gcEvents) { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, GcThreadId) + } + chromeTrace.traceDurationEventEnd(Category.Run, "scalac-" + id) + chromeTrace.close() + } } + private val gcEvents = ArrayBuffer[GcEventData]() + private val GcThreadId = "GC" override def handleNotification(notification: Notification, handback: scala.Any): Unit = { import java.lang.{Long => jLong} @@ -173,13 +231,30 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { + gcEvents += gcEvent + } + reporter.reportGc(gcEvent) + } + } + + override def beforePhase(phase: Phase): ProfileSnap = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Phase, phase.name) + if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) + doGC + if (settings.YprofileExternalTool.containsPhase(phase)) { + println("Profile hook start") + ExternalToolHook.before() } + active foreach {_.beforePhase(phase)} + RealProfiler.snapThread(0) } override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) active foreach {_.afterPhase(phase)} if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") @@ -187,24 +262,85 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S } val finalSnap = if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) { doGC - initialSnap.updateHeap(readHeapUsage()) + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Phase, phase.name) reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforeUnit(phase: Phase, file: AbstractFile): Unit = { assert(mainThread eq Thread.currentThread()) - if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) - doGC - if (settings.YprofileExternalTool.containsPhase(phase)) { - println("Profile hook start") - ExternalToolHook.before() + if (chromeTrace != null) 
chromeTrace.traceDurationEventStart(Category.File, file.name) + } + + private var nextAfterUnitSnap: Long = System.nanoTime() + + override def afterUnit(phase: Phase, file: AbstractFile): Unit = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) { + val now = System.nanoTime() + chromeTrace.traceDurationEventEnd(Category.File, file.name) + if (now > nextAfterUnitSnap) { + val initialSnap = RealProfiler.snapThread(0) + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", initialSnap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", initialSnap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", initialSnap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", initialSnap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", initialSnap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", initialSnap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", initialSnap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } } - active foreach {_.beforePhase(phase)} - snapThread(0) } + override def beforeTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.TypeCheck, sym.rawname.toString) + } + override def afterTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.TypeCheck, sym.rawname.toString) + } + + override def beforeImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def afterImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def beforeMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def afterMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventStart(Category.Completion, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File, associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion, completionName(root, associatedFile)) + } + } + + override def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventEnd(Category.Completion, completionName(root, associatedFile)) + chromeTrace.traceDurationEventEnd(Category.File, associatedFile.name) + chromeTrace.traceDurationEventEnd(Category.Completion, "↯", colour = "thread_state_sleeping") + } + } + + private def completionName(root: Global#Symbol, associatedFile: AbstractFile): String = { + if (root.hasPackageFlag || root.isTopLevel) root.javaBinaryNameString + else { + val enclosing = root.enclosingTopLevelClass + enclosing.javaBinaryNameString + "::" + 
root.rawname.toString + } + } } object EventType extends Enumeration { @@ -228,24 +364,23 @@ sealed trait ProfileReporter { } object ConsoleProfileReporter extends ProfileReporter { + private val outWriter = new PrintWriter(Console.out) + private val delegate = new StreamProfileReporter(new PrintWriter(Console.out)) + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = delegate.reportBackground(profiler, threadRange) + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = delegate.reportForeground(profiler, threadRange) + override def close(profiler: RealProfiler): Unit = outWriter.flush() + + override def header(profiler: RealProfiler): Unit = delegate.header(profiler) + override def reportGc(data: GcEventData): Unit = delegate.reportGc(data) +} - - override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - +object NoOpProfileReporter extends ProfileReporter { + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () override def close(profiler: RealProfiler): Unit = () - override def header(profiler: RealProfiler): Unit = { - println(s"Profiler start (${profiler.id}) ${profiler.outDir}") - } - - override def reportGc(data: GcEventData): Unit = { - println(f"Profiler GC reported ${data.gcEndMillis - data.gcStartMillis}ms") - } + override def header(profiler: RealProfiler): Unit = () + override def reportGc(data: GcEventData): Unit = () } class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { @@ -271,10 +406,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } - diff --git a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 822a7317d28..641526a1de4 100644 --- a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -98,9 +98,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 2783b74a9d5..f4b94a92355 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -414,7 +414,9 @@ trait ScalaSettings extends AbsScalaSettings override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination = StringSetting("-Yprofile-destination", "file", "where to send profiling output - specify a file, 
default is to the console.", ""). + val YprofileDestination = StringSetting("-Yprofile-destination", "file", "Profiling output - specify a file or `-` for console.", ""). + withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileTrace = StringSetting("-Yprofile-trace", "file", "Capture trace of compilation in Chrome Trace format", "profile.trace"). withPostSetHook( _ => YprofileEnabled.value = true ) val YprofileExternalTool = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase", "typer"). withPostSetHook( _ => YprofileEnabled.value = true ) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 6444823efce..2ad68f4d620 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -203,6 +203,7 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol): Unit def sourcefile: Option[AbstractFile] = None + def associatedFile(self: Symbol): AbstractFile = NoAbstractFile /** * Description of the resource (ClassPath, AbstractFile) @@ -221,23 +222,29 @@ abstract class SymbolLoaders { } override def complete(root: Symbol) { + val assocFile = associatedFile(root) + currentRunProfilerBeforeCompletion(root, assocFile) try { - val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - val currentphase = phase - doComplete(root) - phase = currentphase - informTime("loaded " + description, start) - ok = true - setSource(root) - setSource(root.companionSymbol) // module -> class, class -> module - } - catch { - case ex @ (_: IOException | _: MissingRequirementError) => - ok = false - signalError(root, ex) + try { + val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + val currentphase = phase + doComplete(root) + phase = currentphase + informTime("loaded " + description, start) + ok = true + setSource(root) + setSource(root.companionSymbol) // module -> class, class -> module + } + catch { + case ex@(_: IOException | _: MissingRequirementError) => + ok = false + signalError(root, ex) + } + initRoot(root) + if (!root.isPackageClass) initRoot(root.companionSymbol) + } finally { + currentRunProfilerAfterCompletion(root, assocFile) } - initRoot(root) - if (!root.isPackageClass) initRoot(root.companionSymbol) } override def load(root: Symbol) { complete(root) } @@ -336,18 +343,27 @@ abstract class SymbolLoaders { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile + override def associatedFile(self: Symbol): AbstractFile = classfile } class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter { protected def description = "source file "+ srcfile.toString override def fromSource = true override def sourcefile = Some(srcfile) + override def associatedFile(self: Symbol): AbstractFile = srcfile protected def doComplete(root: Symbol): Unit = compileLate(srcfile) } object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter { protected def description = "module class loader" protected def doComplete(root: Symbol) { root.sourceModule.initialize } + override def associatedFile(self: Symbol): AbstractFile = { + val sourceModule = self.sourceModule + sourceModule.rawInfo match { + case loader: SymbolLoader => 
loader.associatedFile(sourceModule) + case _ => super.associatedFile(self) + } + } } /** used from classfile parser to avoid cycles */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 89169137052..2cc7fa72989 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -87,6 +87,15 @@ trait Implicits { * @return A search result */ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { + currentRun.profiler.beforeImplicitSearch(pt) + try { + inferImplicit1(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, pos) + } finally { + currentRun.profiler.afterImplicitSearch(pt) + } + } + + private def inferImplicit1(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 1755042d339..3b93f3f4134 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -766,7 +766,15 @@ trait Macros extends MacroRuntimes with Traces with Helpers { /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`. * @see DefMacroExpander */ - def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt) + def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = { + val macroSym = expandee.symbol + currentRun.profiler.beforeMacroExpansion(macroSym) + try { + pluginsMacroExpand(typer, expandee, mode, pt) + } finally { + currentRun.profiler.afterMacroExpansion(macroSym) + } + } /** Default implementation of `macroExpand`. 
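+   *
+   *  The `macroExpand` entry point above now brackets this call with the profiler, using the
+   *  same begin/end pairing as the other hooks added in this patch (a sketch, with names taken
+   *  from the diff above):
+   *  {{{
+   *  currentRun.profiler.beforeMacroExpansion(macroSym)
+   *  try pluginsMacroExpand(typer, expandee, mode, pt)
+   *  finally currentRun.profiler.afterMacroExpansion(macroSym)
+   *  }}}
+   *  so an expansion that throws still closes its trace event.
+   *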
* Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 354201083b3..0a37488f28f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1821,38 +1821,43 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedClassDef(cdef: ClassDef): Tree = { val clazz = cdef.symbol - val typedMods = typedModifiers(cdef.mods) - assert(clazz != NoSymbol, cdef) - reenterTypeParams(cdef.tparams) - val tparams1 = cdef.tparams mapConserve (typedTypeDef) - val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) - val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) - checkEphemeral(clazz, impl2.body) - - if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { - if (!clazz.owner.isPackageClass) - context.error(clazz.pos, "inner classes cannot be classfile annotations") - // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. - // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement - // of constant argument values "for free". Related to scala/bug#7041. - else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, - """|subclassing Classfile does not - |make your annotation visible at runtime. If that is what - |you want, you must write the annotation class in Java.""".stripMargin) - } - - warnTypeParameterShadow(tparams1, clazz) - - if (!isPastTyper) { - for (ann <- clazz.getAnnotation(DeprecatedAttr)) { - val m = companionSymbolOf(clazz, context) - if (m != NoSymbol) - m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) - } - } - treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) - .setType(NoType) + currentRun.profiler.beforeTypedImplDef(clazz) + try { + val typedMods = typedModifiers(cdef.mods) + assert(clazz != NoSymbol, cdef) + reenterTypeParams(cdef.tparams) + val tparams1 = cdef.tparams mapConserve (typedTypeDef) + val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) + val impl2 = finishMethodSynthesis(impl1, clazz, context) + if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) + checkEphemeral(clazz, impl2.body) + + if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { + if (!clazz.owner.isPackageClass) + context.error(clazz.pos, "inner classes cannot be classfile annotations") + // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. + // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement + // of constant argument values "for free". Related to scala/bug#7041. + else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, + """|subclassing Classfile does not + |make your annotation visible at runtime. 
If that is what + |you want, you must write the annotation class in Java.""".stripMargin) + } + + warnTypeParameterShadow(tparams1, clazz) + + if (!isPastTyper) { + for (ann <- clazz.getAnnotation(DeprecatedAttr)) { + val m = companionSymbolOf(clazz, context) + if (m != NoSymbol) + m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) + } + } + treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) + .setType(NoType) + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } def typedModuleDef(mdef: ModuleDef): Tree = { @@ -1862,31 +1867,37 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (linkedClass != NoSymbol) linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize) - val clazz = mdef.symbol.moduleClass - val typedMods = typedModifiers(mdef.mods) - assert(clazz != NoSymbol, mdef) - val noSerializable = ( - (linkedClass eq NoSymbol) - || linkedClass.isErroneous - || !linkedClass.isSerializable - || clazz.isSerializable - ) - val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { - typedParentTypes(mdef.impl) ++ ( - if (noSerializable) Nil - else { - clazz.makeSerializable() - TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil - } - ) - }) + val clazz = mdef.symbol.moduleClass + currentRun.profiler.beforeTypedImplDef(clazz) + try { - val impl2 = finishMethodSynthesis(impl1, clazz, context) + val typedMods = typedModifiers(mdef.mods) + assert(clazz != NoSymbol, mdef) + val noSerializable = ( + (linkedClass eq NoSymbol) + || linkedClass.isErroneous + || !linkedClass.isSerializable + || clazz.isSerializable + ) + val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { + typedParentTypes(mdef.impl) ++ ( + if (noSerializable) Nil + else { + clazz.makeSerializable() + TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil + } + ) + }) - if (settings.isScala211 && mdef.symbol == PredefModule) - ensurePredefParentsAreInSameSourceFile(impl2) + val impl2 = finishMethodSynthesis(impl1, clazz, context) - treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + if (settings.isScala211 && mdef.symbol == PredefModule) + ensurePredefParentsAreInSameSourceFile(impl2) + + treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } private def ensurePredefParentsAreInSameSourceFile(template: Template) = { @@ -2034,13 +2045,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedValDef(vdef: ValDef): ValDef = { val sym = vdef.symbol - val valDefTyper = { - val maybeConstrCtx = - if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext - else context - newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + currentRun.profiler.beforeTypedImplDef(sym) + try { + val valDefTyper = { + val maybeConstrCtx = + if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext + else context + newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + } + valDefTyper.typedValDefImpl(vdef) + } finally { + currentRun.profiler.afterTypedImplDef(sym) } - valDefTyper.typedValDefImpl(vdef) } // use typedValDef instead. 
this version is called after creating a new context for the ValDef @@ -2259,89 +2275,92 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedDefDef(ddef: DefDef): DefDef = { - // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`) - // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt) val meth = ddef.symbol.initialize + currentRun.profiler.beforeTypedImplDef(meth) + try { - reenterTypeParams(ddef.tparams) - reenterValueParams(ddef.vparamss) + reenterTypeParams(ddef.tparams) + reenterValueParams(ddef.vparamss) - // for `val` and `var` parameter, look at `target` meta-annotation - if (!isPastTyper && meth.isPrimaryConstructor) { - for (vparams <- ddef.vparamss; vd <- vparams) { - if (vd.mods.isParamAccessor) { - vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + // for `val` and `var` parameter, look at `target` meta-annotation + if (!isPastTyper && meth.isPrimaryConstructor) { + for (vparams <- ddef.vparamss; vd <- vparams) { + if (vd.mods.isParamAccessor) { + vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + } } } - } - val tparams1 = ddef.tparams mapConserve typedTypeDef - val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) + val tparams1 = ddef.tparams mapConserve typedTypeDef + val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) - warnTypeParameterShadow(tparams1, meth) + warnTypeParameterShadow(tparams1, meth) - meth.annotations.map(_.completeInfo()) + meth.annotations.map(_.completeInfo()) - for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) - if (isRepeatedParamType(vparam1.symbol.tpe)) - StarParamNotLastError(vparam1) + for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) + if (isRepeatedParamType(vparam1.symbol.tpe)) + StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) - checkNonCyclic(ddef, tpt1) - ddef.tpt.setType(tpt1.tpe) - val typedMods = typedModifiers(ddef.mods) - var rhs1 = - if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors - if (!meth.isPrimaryConstructor && + val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) + checkNonCyclic(ddef, tpt1) + ddef.tpt.setType(tpt1.tpe) + val typedMods = typedModifiers(ddef.mods) + var rhs1 = + if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors + if (!meth.isPrimaryConstructor && (!meth.owner.isClass || - meth.owner.isModuleClass || - meth.owner.isAnonOrRefinementClass)) - InvalidConstructorDefError(ddef) - typed(ddef.rhs) - } else if (meth.isMacro) { - // typechecking macro bodies is sort of unconventional - // that's why we employ our custom typing scheme orchestrated outside of the typer - transformedOr(ddef.rhs, typedMacroBody(this, ddef)) - } else { - transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) - } + meth.owner.isModuleClass || + meth.owner.isAnonOrRefinementClass)) + InvalidConstructorDefError(ddef) + typed(ddef.rhs) + } else if (meth.isMacro) { + // typechecking macro bodies is sort of unconventional + // that's why we employ our custom typing scheme orchestrated outside of the typer + transformedOr(ddef.rhs, typedMacroBody(this, ddef)) + } else { + 
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) + } - if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { - // There are no supercalls for AnyVal or constructors from Java sources, which - // would blow up in computeParamAliases; there's nothing to be computed for them - // anyway. - if (meth.isPrimaryConstructor) - computeParamAliases(meth.owner, vparamss1, rhs1) - else - checkSelfConstructorArgs(ddef, meth.owner) - } + if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { + // There are no supercalls for AnyVal or constructors from Java sources, which + // would blow up in computeParamAliases; there's nothing to be computed for them + // anyway. + if (meth.isPrimaryConstructor) + computeParamAliases(meth.owner, vparamss1, rhs1) + else + checkSelfConstructorArgs(ddef, meth.owner) + } - if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) - rhs1 = checkDead(context, rhs1) + if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) + rhs1 = checkDead(context, rhs1) - if (!isPastTyper && meth.owner.isClass && + if (!isPastTyper && meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) - StarWithDefaultError(meth) - - if (!isPastTyper) { - val allParams = meth.paramss.flatten - for (p <- allParams) { - for (n <- p.deprecatedParamName) { - if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) - DeprecatedParamNameError(p, n) + StarWithDefaultError(meth) + + if (!isPastTyper) { + val allParams = meth.paramss.flatten + for (p <- allParams) { + for (n <- p.deprecatedParamName) { + if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) + DeprecatedParamNameError(p, n) + } } - } - if (meth.isStructuralRefinementMember) - checkMethodStructuralCompatible(ddef) + if (meth.isStructuralRefinementMember) + checkMethodStructuralCompatible(ddef) - if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { - case List(param) :: _ if !param.isImplicit => - checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) - case _ => + if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { + case List(param) :: _ if !param.isImplicit => + checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) + case _ => + } } - } - treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(meth) + } } def typedTypeDef(tdef: TypeDef): TypeDef = diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 99fd5edd7ac..6b24d90bd48 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -21,6 +21,7 @@ import java.util.concurrent.TimeUnit import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.{TreeGen => InternalTreeGen} +import scala.reflect.io.AbstractFile abstract class SymbolTable extends macros.Universe with Collections @@ -493,6 +494,9 @@ abstract class SymbolTable extends macros.Universe * Adds the `sm` String interpolator to a [[scala.StringContext]]. 
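+   *  A sketch of its use (illustrative; the interpolator strips the margin after substitution):
+   *  {{{
+   *  sm"""|expected: $expected
+   *       |actual  : $actual"""   // one string, margins stripped
+   *  }}}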
*/
   implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
+
+  protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = ()
+  protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = ()
 }
 
 trait SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala
new file mode 100644
index 00000000000..69da5d5982c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala
@@ -0,0 +1,189 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.reflect.internal.util
+
+import java.io.Closeable
+import java.lang.management.ManagementFactory
+import java.nio.file.{Files, Path}
+import java.util
+import java.util.concurrent.TimeUnit
+
+import scala.collection.mutable
+
+object ChromeTrace {
+
+  private object EventType {
+    final val Start = "B"
+    final val Instant = "I"
+    final val End = "E"
+    final val Complete = "X"
+
+    final val Counter = "C"
+
+    final val AsyncStart = "b"
+    final val AsyncInstant = "n"
+    final val AsyncEnd = "e"
+  }
+
+}
+
+/** Allows writing a subset of https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#
+  * for use in Chrome's chrome://tracing or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */
+final class ChromeTrace(f: Path) extends Closeable {
+  import ChromeTrace.EventType
+  private val traceWriter = FileUtils.newAsyncBufferedWriter(f)
+  private val context = mutable.ArrayStack[JsonContext](TopContext)
+  private val tidCache = new ThreadLocal[String]() {
+    override def initialValue(): String = Thread.currentThread().getId.formatted("%05d")
+  }
+  objStart()
+  fld("traceEvents")
+  context.push(ValueContext)
+  arrStart()
+  traceWriter.newLine()
+
+  private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "")
+
+  override def close(): Unit = {
+    arrEnd()
+    objEnd()
+    context.pop()
+    tidCache.remove()
+    traceWriter.close()
+  }
+
+  def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = {
+    val durationMicros = nanosToMicros(durationNanos)
+    val startMicros = nanosToMicros(startNanos)
+    objStart()
+    str("cat", "scalac")
+    str("name", name)
+    str("ph", EventType.Complete)
+    str("tid", tid)
+    writePid(pidSuffix)
+    lng("ts", startMicros)
+    lng("dur", durationMicros)
+    objEnd()
+    traceWriter.newLine()
+  }
+
+  private def writePid(pidSuffix: String) = {
+    if (pidSuffix == "")
+      str("pid", pid)
+    else
+      str2("pid", pid, "-", pidSuffix)
+  }
+
+  def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = {
+    objStart()
+    str("cat", "scalac")
+    str("name", name)
+    str("ph", EventType.Counter)
+    str("tid", tid())
+    writePid(pidSuffix = if (processWide) "" else tid())
+    lng("ts", microTime())
+    fld("args")
+    objStart()
+    lng(counterName, count)
+    objEnd()
+    objEnd()
+    traceWriter.newLine()
+  }
+
+  def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = 
tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + sealed abstract class JsonContext + case class ArrayContext(var first: Boolean) extends JsonContext + case class ObjectContext(var first: Boolean) extends JsonContext + case object ValueContext extends JsonContext + case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} diff --git a/src/reflect/scala/reflect/internal/util/FileUtils.scala b/src/reflect/scala/reflect/internal/util/FileUtils.scala new file mode 100644 index 00000000000..ef595577564 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/FileUtils.scala @@ -0,0 +1,199 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.reflect.internal.util + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + private val Close = CharBuffer.allocate(0) + private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, 
offset, offset + capacity)
+          flushAsync()
+          length -= capacity
+          offset += capacity
+        }
+      }
+    }
+
+    def newLine(): Unit = write(scala.util.Properties.lineSeparator)
+
+    /** slightly breaks the flush contract in that the flush is not complete when the method returns */
+    override def flush(): Unit = {
+      flushAsync()
+    }
+
+    override def close(): Unit = {
+      background.ensureProcessed(current)
+      background.ensureProcessed(AsyncBufferedWriter.Close)
+      current = null
+      Await.result(background.asyncStatus.future, Duration.Inf)
+      underlying.close()
+    }
+    private object background extends Runnable{
+
+      import scala.concurrent.ExecutionContext.Implicits.global
+
+      private val pending = new LinkedBlockingQueue[CharBuffer]
+      //a detected failure will cause a Failure, Success indicates a close
+      val asyncStatus = Promise[Unit]()
+      private val scheduled = new AtomicBoolean
+      @volatile var reuseBuffer: CharBuffer = _
+
+      def ensureProcessed(buffer: CharBuffer): Unit = {
+        if (asyncStatus.isCompleted) {
+          asyncStatus.future.value.get match {
+            case Success(()) => throw new IllegalStateException("closed")
+            case Failure(t) => throw new IOException("async failure", t)
+          }
+        }
+
+        //order is essential - add to the queue before the CAS
+        pending.add(buffer)
+        if (scheduled.compareAndSet(false, true)) {
+          global.execute(background)
+        }
+      }
+
+      def run(): Unit = {
+        try {
+          while (!pending.isEmpty) {
+            val next = pending.poll()
+            if (next eq AsyncBufferedWriter.Flush) {
+              underlying.flush()
+            } else if (next eq AsyncBufferedWriter.Close) {
+              underlying.flush()
+              underlying.close()
+              asyncStatus.trySuccess(())
+            } else {
+              val array = next.array()
+              next.flip()
+              underlying.write(array, next.arrayOffset() + next.position(), next.limit())
+              reuseBuffer = next
+            }
+          }
+        } catch {
+          case t: Throwable =>
+            asyncStatus.tryFailure(t)
+            throw t
+        }
+        finally scheduled.set(false)
+
+        //we are not scheduled any more
+        //as a last check ensure that we didn't race with an addition to the queue
+        //order is essential - queue is checked before CAS
+        if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) {
+          global.execute(background)
+        }
+      }
+    }
+  }
+}
diff --git a/test/junit/scala/reflect/internal/util/FileUtilsTest.scala b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala
new file mode 100644
index 00000000000..21eba42985b
--- /dev/null
+++ b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala
@@ -0,0 +1,89 @@
+package scala.reflect.internal.util
+
+import java.io._
+
+import org.junit.Assert._
+import org.junit._
+
+class FileUtilsTest {
+
+  @Test def writeIsSame(): Unit = {
+    val fileTest = File.createTempFile("FileUtilsTest", "t1")
+    val fileExpected = File.createTempFile("FileUtilsTest", "t2")
+
+    val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), false)
+    val sExpected = new BufferedWriter(new FileWriter(fileExpected))
+
+    def writeBoth(s:String, asChars: Boolean) = {
+      if (asChars) {
+        sTest.write(s.toCharArray)
+        sExpected.write(s.toCharArray)
+      } else {
+        sTest.write(s)
+        sExpected.write(s)
+      }
+    }
+
+    for (i <- 1 to 2000) {
+      writeBoth(s"line $i text;", true)
+      writeBoth(s"line $i chars", false)
+      sTest.newLine
+      sExpected.newLine
+    }
+    sTest.close()
+    sExpected.close()
+
+    assertEquals(fileExpected.length(),fileTest.length())
+
+    val expIn = new BufferedReader(new FileReader(fileExpected))
+    val testIn = new BufferedReader(new FileReader(fileTest))
+
+    var exp = expIn.readLine()
+    while (exp ne null) {
+      val actual = testIn.readLine()
+      assertEquals(exp, actual)
+      exp 
= expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Test def showPerformance: Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1") + val fileExpected = File.createTempFile("FileUtilsTest", "t2") + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} From 3d6ed6b14d00b75c8fb200cd498a2682e1fdbc27 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 25 Sep 2018 07:48:11 +1000 Subject: [PATCH 02/66] Add setting to restrict macro classpath For better cacheability (cherry picked from commit 24124d5fc7759b6bb60d767975ba2a4f197bf7bc) --- .../scala/tools/nsc/settings/ScalaSettings.scala | 2 ++ src/compiler/scala/tools/nsc/typechecker/Macros.scala | 10 +++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 2783b74a9d5..42fb7d6d02b 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -243,6 +243,8 @@ trait ScalaSettings extends AbsScalaSettings val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") + val YmacroClasspath = PathSetting ("-Ymacro-classpath", "The classpath used to reflectively load macro implementations", "") + val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 1755042d339..bbc2cfecc8e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -14,6 +14,8 @@ package scala.tools.nsc package typechecker import java.lang.Math.min +import java.net.URL + import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils @@ -22,6 +24,7 @@ import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable import scala.reflect.internal.util.ListOfNil +import scala.reflect.io.AbstractFile import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes} import scala.reflect.macros.compiler.DefaultMacroCompiler import scala.tools.reflect.FastTrack @@ -75,7 +78,12 @@ trait Macros extends MacroRuntimes with Traces with Helpers { * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
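+   *
+   * With the `-Ymacro-classpath` setting added below, macro implementations are loaded from
+   * the given path instead of the full compilation classpath, which keeps the resulting
+   * classloader cacheable; an illustrative invocation:
+   * {{{
+   * scalac -Ymacro-classpath macros.jar -classpath app.jar:macros.jar App.scala
+   * }}}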
*/ protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) + af <- Option(AbstractFile getDirectory file) + } yield af.file.toURI.toURL + } else global.classPath.asURLs def newLoader = () => { macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) From 4eca38e395f88551a8e2e595e58bb647b53f2c06 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 11 Oct 2018 17:50:57 +1000 Subject: [PATCH 03/66] Move macro classloader cache to top level object. (cherry picked from commit 5175550b16bff48cef6002c472baba8de5101649) --- .../scala/reflect/macros/runtime/MacroRuntimes.scala | 12 ++++++++---- .../nsc/classpath/ZipAndJarFileLookupFactory.scala | 1 + .../scala/tools/nsc/typechecker/Macros.scala | 11 +++++++---- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 73520dffb92..c34d85a603b 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -60,10 +60,14 @@ trait MacroRuntimes extends JavaReflectionRuntimes { * a classloader mapped to that virtual directory. */ private lazy val defaultMacroClassloaderCache = { - def attemptClose(loader: ClassLoader): Unit = loader match { - case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close() - case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent) - case _ => ??? + def attemptClose(loader: ClassLoader): Unit = { + if (!scala.tools.nsc.typechecker.Macros.macroClassLoadersCache.owns(loader)) { + loader match { + case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close() + case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent) + case _ => ??? + } + } } perRunCaches.newGeneric(findMacroClassLoader, attemptClose _) } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6f8b9a55c0c..2c1d39ea151 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -190,6 +190,7 @@ final class FileBasedCache[T] { import java.nio.file.Path private case class Stamp(lastModified: FileTime, fileKey: Object) private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)] + def owns(t: T): Boolean = cache.valuesIterator.exists(_._2.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef]) def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized { val stamps = paths.map { path => diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index bbc2cfecc8e..3dfddfe63f1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -67,9 +67,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings - private final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]() - /** Obtains a `ClassLoader` instance used for macro expansion. 
   *
   * By default a new `ScalaClassLoader` is created using the classpath
   * from global and the classloader of self as parent.
   *
   * Mirrors with runtime definitions (e.g. Repl) need to adjust this method.
   */
  protected def findMacroClassLoader(): ClassLoader = {
@@ -109,7 +106,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
         macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.")
         newLoader()
       } else {
-        macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader)
+        Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader)
       }
     }
   }
@@ -971,6 +968,12 @@ trait Macros extends MacroRuntimes with Traces with Helpers {
   }.transform(expandee)
 }
 
+object Macros {
+  final val macroClassLoadersCache =
+    new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]()
+
+}
+
 trait MacrosStats {
   self: TypesStats with Statistics =>
   val macroExpandCount = newCounter ("#macro expansions", "typer")

From b029ae69c98dac5dcb9361d2f0ee364663633e8e Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 15 Oct 2018 16:09:13 +1000
Subject: [PATCH 04/66] Allow build tools to customize plugin classloader creation

An extension point analogous to `findMacroClassLoader`

(cherry picked from commit 01a9dbd8261c10ec9e7b7f4061e36c862d87af75)
---
 src/compiler/scala/tools/nsc/plugins/Plugin.scala  | 8 ++++----
 src/compiler/scala/tools/nsc/plugins/Plugins.scala | 6 +++++-
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index b76f67ccf6a..94025274d7a 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -103,7 +103,7 @@ object Plugin {
    * mitigate the cost of dynamic classloading as it has been
   * measured in https://github.com/scala/scala-dev/issues/458.
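+   *
+   * A usage sketch, now that this entry point is exposed to build tools (the
+   * plugin jar path is illustrative):
+   * {{{
+   * val loader = Plugin.loaderFor(Seq(Path("plugins/sample.jar")), disableCache = false)
+   * }}}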
   */
-  private def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = {
+  def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = {
     def newLoader = () => {
       val compilerLoader = classOf[Plugin].getClassLoader
       val urls = locations map (_.toURL)
@@ -155,7 +155,7 @@
       paths: List[List[Path]],
       dirs: List[Path],
       ignoring: List[String],
-      disableClassLoaderCache: Boolean): List[Try[AnyClass]] =
+      findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] =
   {
     // List[(jar, Try(descriptor))] in dir
     def scan(d: Directory) =
@@ -166,7 +166,7 @@
     // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars
     val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d =>
       scan(d.toDirectory) collect {
-        case (j, Success(pd)) => Success((pd, loaderFor(Seq(j), disableClassLoaderCache)))
+        case (j, Success(pd)) => Success((pd, findPluginClassloader(Seq(j))))
       }
     }
@@ -183,7 +183,7 @@
       loop(ps)
     }
     val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map {
-      case (p, Success(pd)) => Success((pd, loaderFor(p, disableClassLoaderCache)))
+      case (p, Success(pd)) => Success((pd, findPluginClassloader(p)))
       case (_, Failure(e)) => Failure(e)
     }

diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index bba855ba541..aa95d3dcadb 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -37,7 +37,7 @@ trait Plugins { global: Global =>
       def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s
       asPath(settings.pluginsDir.value) map injectDefault map Path.apply
     }
-    val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name)
+    val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, findPluginClassLoader(_))
     val (goods, errors) = maybes partition (_.isSuccess)
     // Explicit parameterization of recover to avoid -Xlint warning about inferred Any
     errors foreach (_.recover[Any] {
@@ -53,6 +53,10 @@ trait Plugins { global: Global =>
     classes map (Plugin.instantiate(_, this))
   }

+  protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = {
+    Plugin.loaderFor(classpath, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name)
+  }
+
   protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList()

   /** Load all available plugins. Skips plugins that

From fa53310833d6f5d90e5e4b2b09ff00a27139668e Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 15 Oct 2018 17:53:34 +1000
Subject: [PATCH 05/66] Read plugin descriptor via Classloader.getResource

This lets customized Globals deliver this file, and offloads the
scanning logic.
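With this change a plugin classloader only needs to expose the descriptor as
a resource named `scalac-plugin.xml`. For illustration, a minimal descriptor
of the shape exercised by the test added in this patch:

    <plugin>
      <name>sample-plugin</name>
      <classname>scala.tools.nsc.SamplePlugin</classname>
    </plugin>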
(cherry picked from commit bec58990dda5be70ab34b152537e0f8d102084ee) --- .../scala/tools/nsc/plugins/Plugin.scala | 40 ++++------- .../scala/tools/nsc/plugins/Plugins.scala | 50 +++++++++++++ .../scala/tools/nsc/typechecker/Macros.scala | 45 ------------ .../scala/tools/reflect/ReflectGlobal.scala | 20 +++--- .../tools/nsc/interpreter/ReplGlobal.scala | 15 ++-- .../nsc/GlobalCustomizeClassloaderTest.scala | 71 +++++++++++++++++++ 6 files changed, 148 insertions(+), 93 deletions(-) create mode 100644 test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 94025274d7a..768ddf9fd2d 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -17,7 +17,9 @@ import scala.tools.nsc.io.Jar import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.{Directory, File, Path} import java.io.InputStream +import java.net.URL +import scala.collection.JavaConverters._ import scala.collection.mutable import scala.tools.nsc.classpath.FileBasedCache import scala.util.{Failure, Success, Try} @@ -157,38 +159,24 @@ object Plugin { ignoring: List[String], findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = { - // List[(jar, Try(descriptor))] in dir - def scan(d: Directory) = - d.files.toList sortBy (_.name) filter (Jar isJarOrZip _) map (j => (j, loadDescriptionFromJar(j))) - type PDResults = List[Try[(PluginDescription, ScalaClassLoader)]] - // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars - val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d => - scan(d.toDirectory) collect { - case (j, Success(pd)) => Success((pd, findPluginClassloader(Seq(j)))) + val fromLoaders = paths.map {path => + val loader = findPluginClassloader(path) + loader.getResource(PluginXML) match { + case null => Failure(new MissingPluginException(path)) + case url => + val inputStream = url.openStream + try { + Try((PluginDescription.fromXML(inputStream), loader)) + } finally { + inputStream.close() + } } } - // scan jar paths for plugins, taking the first plugin you find. - // a path element can be either a plugin.jar or an exploded dir. 
- def findDescriptor(ps: List[Path]) = { - def loop(qs: List[Path]): Try[PluginDescription] = qs match { - case Nil => Failure(new MissingPluginException(ps)) - case p :: rest => - if (p.isDirectory) loadDescriptionFromFile(p.toDirectory / PluginXML) orElse loop(rest) - else if (p.isFile) loadDescriptionFromJar(p.toFile) orElse loop(rest) - else loop(rest) - } - loop(ps) - } - val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map { - case (p, Success(pd)) => Success((pd, findPluginClassloader(p))) - case (_, Failure(e)) => Failure(e) - } - val seen = mutable.HashSet[String]() - val enabled = (fromPaths ::: fromDirs) map { + val enabled = fromLoaders map { case Success((pd, loader)) if seen(pd.classname) => // a nod to scala/bug#7494, take the plugin classes distinctly Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})")) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index aa95d3dcadb..9df0d0fc01c 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -13,7 +13,12 @@ package scala.tools.nsc package plugins +import java.net.URL + +import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path +import scala.tools.nsc +import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults @@ -127,4 +132,49 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString + + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
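+   *
+   * A sketch of such an adjustment (modelled on the REPL override later in
+   * this patch; `virtualDirectory` stands for wherever runtime-compiled
+   * classes are kept):
+   * {{{
+   * override protected[scala] def findMacroClassLoader(): ClassLoader =
+   *   new AbstractFileClassLoader(virtualDirectory, super.findMacroClassLoader())
+   * }}}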
+ */ + protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) + af <- Option(nsc.io.AbstractFile getDirectory file) + } yield af.file.toURI.toURL + } else global.classPath.asURLs + def newLoader = () => { + analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) + } + + val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name + if (disableCache) newLoader() + else { + import scala.tools.nsc.io.Jar + import scala.reflect.io.{AbstractFile, Path} + + val urlsAndFiles = classpath.map(u => u -> AbstractFile.getURL(u)) + val hasNullURL = urlsAndFiles.filter(_._2 eq null) + if (hasNullURL.nonEmpty) { + // TODO if the only null is jrt:// we can still cache + // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null + analyzer.macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") + newLoader() + } else { + val locations = urlsAndFiles.map(t => Path(t._2.file)) + val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) + if (nonJarZips.nonEmpty) { + analyzer.macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") + newLoader() + } else { + Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) + } + } + } + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 3dfddfe63f1..f649b924e46 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -67,51 +67,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
- */ - protected def findMacroClassLoader(): ClassLoader = { - val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { - for { - file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) - af <- Option(AbstractFile getDirectory file) - } yield af.file.toURI.toURL - } else global.classPath.asURLs - def newLoader = () => { - macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) - } - - val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name - if (disableCache) newLoader() - else { - import scala.tools.nsc.io.Jar - import scala.reflect.io.{AbstractFile, Path} - - val urlsAndFiles = classpath.map(u => u -> AbstractFile.getURL(u)) - val hasNullURL = urlsAndFiles.filter(_._2 eq null) - if (hasNullURL.nonEmpty) { - // TODO if the only null is jrt:// we can still cache - // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null - macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") - newLoader() - } else { - val locations = urlsAndFiles.map(t => Path(t._2.file)) - val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) - if (nonJarZips.nonEmpty) { - macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") - newLoader() - } else { - Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } - } - } - } - /** `MacroImplBinding` and its companion module are responsible for * serialization/deserialization of macro def -> impl bindings. * diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index e1cf834c6fb..5985b82bff9 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -25,18 +25,14 @@ import scala.tools.nsc.typechecker.Analyzer class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { - override lazy val analyzer = new { - val global: ReflectGlobal.this.type = ReflectGlobal.this - } with Analyzer { - /** Obtains the classLoader used for runtime macro expansion. - * - * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. - * The [[rootClassLoader]] is used to obtain runtime defined macros. - */ - override protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - ScalaClassLoader.fromURLs(classpath, rootClassLoader) - } + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in `global.classPath` or `rootClassLoader`. + * The `rootClassLoader` is used to obtain runtime defined macros. 
+ */ + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath = classPath.asURLs + ScalaClassLoader.fromURLs(classpath, rootClassLoader) } override def transformedType(sym: Symbol) = diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 1273d6ac92f..42490c604e1 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -30,16 +30,11 @@ trait ReplGlobal extends Global { super.abort(msg) } - override lazy val analyzer = new { - val global: ReplGlobal.this.type = ReplGlobal.this - } with Analyzer { - - override protected def findMacroClassLoader(): ClassLoader = { - val loader = super.findMacroClassLoader - macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs)) - val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get - new util.AbstractFileClassLoader(virtualDirectory, loader) {} - } + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val loader = super.findMacroClassLoader + analyzer.macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(classPath.asURLs)) + val virtualDirectory = analyzer.globalSettings.outputDirs.getSingleOutput.get + new util.AbstractFileClassLoader(virtualDirectory, loader) {} } override def optimizerClassPath(base: ClassPath): ClassPath = { diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala new file mode 100644 index 00000000000..0e868bd2872 --- /dev/null +++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala @@ -0,0 +1,71 @@ +package scala.tools.nsc + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.reflect.internal.util.{AbstractFileClassLoader, NoSourceFile} +import scala.reflect.io.{Path, VirtualDirectory} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} + +@RunWith(classOf[JUnit4]) +class GlobalCustomizeClassloaderTest { + // Demonstrate extension points to customise creation of the classloaders used to load compiler + // plugins and macro implementations. + // + // A use case could be for a build tool to take control of caching of these classloaders in a way + // that properly closes them before one of the elements needs to be overwritten. 
+  @Test def test(): Unit = {
+    val g = new Global(new Settings) {
+      override protected[scala] def findMacroClassLoader(): ClassLoader = getClass.getClassLoader
+      override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = {
+        val d = new VirtualDirectory("", None)
+        val xml = d.fileNamed("scalac-plugin.xml")
+        val out = xml.bufferedOutput
+        out.write(
+          s"""<plugin>
+             | <name>sample-plugin</name>
+             | <classname>${classOf[SamplePlugin].getName}</classname>
+             |</plugin>
+             |""".stripMargin.getBytes())
+        out.close()
+        new AbstractFileClassLoader(d, getClass.getClassLoader)
+      }
+    }
+    g.settings.usejavacp.value = true
+    g.settings.plugin.value = List("sample")
+    new g.Run
+    assert(g.settings.log.value == List("typer"))
+
+    val unit = new g.CompilationUnit(NoSourceFile)
+    val context = g.analyzer.rootContext(unit)
+    val typer = g.analyzer.newTyper(context)
+    import g._
+    SampleMacro.data = "in this classloader"
+    val typed = typer.typed(q"scala.tools.nsc.SampleMacro.m")
+    assert(!reporter.hasErrors)
+    typed match {
+      case Typed(Literal(Constant(s: String)), _) => Assert.assertEquals(SampleMacro.data, s)
+      case _ => Assert.fail()
+    }
+  }
+}
+
+object SampleMacro {
+  var data: String = _
+  import language.experimental.macros
+  import scala.reflect.macros.blackbox.Context
+  def m: String = macro impl
+  def impl(c: Context): c.Tree = c.universe.Literal(c.universe.Constant(data))
+}
+
+class SamplePlugin(val global: Global) extends Plugin {
+  override val name: String = "sample"
+  override val description: String = "sample"
+  override val components: List[PluginComponent] = Nil
+  override def init(options: List[String], error: String => Unit): Boolean = {
+    val result = super.init(options, error)
+    global.settings.log.value = List("typer")
+    result
+  }
+}

From 6e5fd006d9592f3ec3e08282b8a462fab70e10f6 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Tue, 16 Oct 2018 16:11:04 +1000
Subject: [PATCH 06/66] Rework closure of macro classloaders

(cherry picked from commit 837f9249d37e656a75e4faf18d54553a13a5046d)
---
 .../reflect/macros/runtime/MacroRuntimes.scala     | 17 +----------------
 .../classpath/ZipAndJarFileLookupFactory.scala     |  1 -
 .../scala/tools/nsc/plugins/Plugins.scala          |  4 ++--
 .../scala/tools/reflect/ReflectGlobal.scala        |  2 +-
 .../scala/reflect/internal/SymbolTable.scala       | 18 ++++++++++++++++++
 5 files changed, 22 insertions(+), 20 deletions(-)

diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
index c34d85a603b..557385744ac 100644
--- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -54,23 +54,8 @@ trait MacroRuntimes extends JavaReflectionRuntimes {
   /** Macro classloader that is used to resolve and run macro implementations.
    * Loads classes from -cp (aka the library classpath).
    * Is also capable of detecting REPL and reusing its classloader.
-   *
-   * When -Xmacro-jit is enabled, we sometimes fallback to on-the-fly compilation of macro implementations,
-   * which compiles implementations into a virtual directory (very much like REPL does) and then conjures
-   * a classloader mapped to that virtual directory.
*/ - private lazy val defaultMacroClassloaderCache = { - def attemptClose(loader: ClassLoader): Unit = { - if (!scala.tools.nsc.typechecker.Macros.macroClassLoadersCache.owns(loader)) { - loader match { - case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close() - case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent) - case _ => ??? - } - } - } - perRunCaches.newGeneric(findMacroClassLoader, attemptClose _) - } + private lazy val defaultMacroClassloaderCache: () => ClassLoader = perRunCaches.newGeneric(findMacroClassLoader()) def defaultMacroClassloader: ClassLoader = defaultMacroClassloaderCache() /** Abstracts away resolution of macro runtimes. diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 2c1d39ea151..6f8b9a55c0c 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -190,7 +190,6 @@ final class FileBasedCache[T] { import java.nio.file.Path private case class Stamp(lastModified: FileTime, fileKey: Object) private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)] - def owns(t: T): Boolean = cache.valuesIterator.exists(_._2.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef]) def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized { val stamps = paths.map { path => diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 9df0d0fc01c..119bf075bdd 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -164,13 +164,13 @@ trait Plugins { global: Global => // TODO if the only null is jrt:// we can still cache // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null analyzer.macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") - newLoader() + perRunCaches.recordClassloader(newLoader()) } else { val locations = urlsAndFiles.map(t => Path(t._2.file)) val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) if (nonJarZips.nonEmpty) { analyzer.macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") - newLoader() + perRunCaches.recordClassloader(newLoader()) } else { Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) } diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 5985b82bff9..2efd699e9f4 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -32,7 +32,7 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val */ override protected[scala] def findMacroClassLoader(): ClassLoader = { val classpath = classPath.asURLs - ScalaClassLoader.fromURLs(classpath, rootClassLoader) + perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader)) } override def transformedType(sym: Symbol) = diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 99fd5edd7ac..0645e60659c 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ 
b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -14,6 +14,8 @@ package scala package reflect package internal +import java.net.URLClassLoader + import scala.annotation.elidable import scala.collection.mutable import util._ @@ -416,6 +418,22 @@ abstract class SymbolTable extends macros.Universe cache } + /** Closes the provided classloader at the conclusion of this Run */ + final def recordClassloader(loader: ClassLoader): ClassLoader = { + def attemptClose(loader: ClassLoader): Unit = { + loader match { + case u: URLClassLoader => debuglog("Closing classloader " + u); u.close() + case _ => + } + } + caches ::= new WeakReference((new Clearable { + def clear(): Unit = { + attemptClose(loader) + } + })) + loader + } + /** * Removes a cache from the per-run caches. This is useful for testing: it allows running the * compiler and then inspect the state of a cache. From 33635526a2d24f6d8ce7d8daebf51af024c21ffc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Oct 2018 17:04:36 +1000 Subject: [PATCH 07/66] Make Global closeable Close Global-scoped JARs and ClassLoaders. Entries of FileBasedCache that have a lifetime that exceeds any single Global instance are reference counted. When the count hits zero, it is closed if no references appear after a delay. (cherry picked from commit deb10d4af2fc39e75337791deb3c0b6e5a74386a) --- .../scala/tools/nsc/CloseableRegistry.scala | 34 ++++++ .../tools/nsc/GenericRunnerSettings.scala | 9 +- src/compiler/scala/tools/nsc/Global.scala | 14 ++- .../tools/nsc/backend/JavaPlatform.scala | 2 +- .../nsc/classpath/AggregateClassPath.scala | 1 - .../nsc/classpath/ClassPathFactory.scala | 12 +- .../nsc/classpath/DirectoryClassPath.scala | 16 ++- .../classpath/VirtualDirectoryClassPath.scala | 1 - .../ZipAndJarFileLookupFactory.scala | 107 ++++++++++++++---- .../nsc/classpath/ZipArchiveFileLookup.scala | 6 +- .../scala/tools/nsc/plugins/Plugin.scala | 21 +--- .../scala/tools/nsc/plugins/Plugins.scala | 35 +++++- .../scala/tools/nsc/typechecker/Macros.scala | 3 +- .../scala/tools/nsc/util/ClassPath.scala | 2 + .../scala/tools/reflect/ReflectMain.scala | 6 +- .../scala/tools/util/PathResolver.scala | 30 +++-- .../scala/tools/partest/BytecodeTest.scala | 3 +- src/reflect/scala/reflect/io/ZipArchive.scala | 17 +++ .../scala/tools/nsc/interpreter/IMain.scala | 8 +- .../interpreter/PresentationCompilation.scala | 11 +- .../tools/nsc/interpreter/ReplGlobal.scala | 2 +- src/scalap/scala/tools/scalap/Main.scala | 29 +++-- .../nsc/GlobalCustomizeClassloaderTest.scala | 1 + .../nsc/classpath/JrtClassPathTest.scala | 11 +- .../nsc/classpath/PathResolverBaseTest.scala | 6 +- .../ZipAndJarFileLookupFactoryTest.scala | 8 +- .../symtab/SymbolTableForUnitTesting.scala | 2 +- 27 files changed, 288 insertions(+), 109 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/CloseableRegistry.scala diff --git a/src/compiler/scala/tools/nsc/CloseableRegistry.scala b/src/compiler/scala/tools/nsc/CloseableRegistry.scala new file mode 100644 index 00000000000..9812a213626 --- /dev/null +++ b/src/compiler/scala/tools/nsc/CloseableRegistry.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
 */

+package scala.tools.nsc
+
+import scala.util.control.NonFatal
+
+/** Registry for resources to close when `Global` is closed */
+final class CloseableRegistry {
+  private[this] var closeables: List[java.io.Closeable] = Nil
+  final def registerClosable(c: java.io.Closeable): Unit = {
+    closeables ::= c
+  }
+
+  def close(): Unit = {
+    for (c <- closeables) {
+      try {
+        c.close()
+      } catch {
+        case NonFatal(_) =>
+      }
+    }
+    closeables = Nil
+  }
+}
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index cb26b4d9d66..d7e379b58eb 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -16,7 +16,14 @@ import java.net.URL
 import scala.tools.util.PathResolver

 class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
-  lazy val classpathURLs: Seq[URL] = new PathResolver(this).resultAsURLs
+  lazy val classpathURLs: Seq[URL] = {
+    val registry = new CloseableRegistry
+    try {
+      new PathResolver(this, registry).resultAsURLs
+    } finally {
+      registry.close()
+    }
+  }

   val howtorun =
     ChoiceSetting(
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 79358c172df..6ca1e922e62 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -39,9 +39,12 @@ import scala.language.postfixOps
 import scala.tools.nsc.ast.{TreeGen => AstTreeGen}
 import scala.tools.nsc.classpath._
 import scala.tools.nsc.profile.Profiler
+import scala.util.control.NonFatal
+import java.io.Closeable

 class Global(var currentSettings: Settings, reporter0: Reporter)
     extends SymbolTable
+    with Closeable
     with CompilationUnits
     with Plugins
     with PhaseAssembly
@@ -816,7 +819,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter)

   /** Extend classpath of `platform` and rescan updated packages. */
   def extendCompilerClassPath(urls: URL*): Unit = {
-    val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings))
+    val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings, closeableRegistry))
     val newClassPath = AggregateClassPath.createAggregate(platform.classPath +: urlClasspaths : _*)
     platform.currentClassPath = Some(newClassPath)
     invalidateClassPathEntries(urls.map(_.getPath): _*)
@@ -878,7 +881,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter)
       }
       entries(classPath) find matchesCanonical match {
         case Some(oldEntry) =>
-          Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings))
+          Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings, closeableRegistry))
         case None =>
          error(s"Error adding entry to classpath.
During invalidation, no entry named $path in classpath $classPath") None @@ -1682,6 +1685,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } def createJavadoc = false + + final val closeableRegistry: CloseableRegistry = new CloseableRegistry + + def close(): Unit = { + perRunCaches.clearAll() + closeableRegistry.close() + } } object Global { diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index ff11f434710..05396fc6ce7 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -27,7 +27,7 @@ trait JavaPlatform extends Platform { private[nsc] var currentClassPath: Option[ClassPath] = None private[nsc] def classPath: ClassPath = { - if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) + if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings, global.closeableRegistry).result) currentClassPath.get } diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index 68fb3000b8c..ac4970f3cb6 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -66,7 +66,6 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) - override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct aggregatedPackages diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index fa916648359..f2fb2b0224d 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -14,7 +14,7 @@ package scala.tools.nsc.classpath import scala.reflect.io.{AbstractFile, VirtualDirectory} import scala.reflect.io.Path.string2path -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils.AbstractFileOps import scala.tools.nsc.util.ClassPath @@ -22,11 +22,11 @@ import scala.tools.nsc.util.ClassPath * Provides factory methods for classpath. When creating classpath instances for a given path, * it uses proper type of classpath depending on a types of particular files containing sources or classes. */ -class ClassPathFactory(settings: Settings) { +class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry) { /** * Create a new classpath based on the abstract file. */ - def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings) + def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings, closeableRegistry) /** * Creators for sub classpaths which preserve this context. 
@@ -70,7 +70,7 @@ class ClassPathFactory(settings: Settings) { private def createSourcePath(file: AbstractFile): ClassPath = if (file.isJarOrZip) - ZipAndJarSourcePathFactory.create(file, settings) + ZipAndJarSourcePathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectorySourcePath(file.file) else @@ -78,11 +78,11 @@ class ClassPathFactory(settings: Settings) { } object ClassPathFactory { - def newClassPath(file: AbstractFile, settings: Settings): ClassPath = file match { + def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = file match { case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => if (file.isJarOrZip) - ZipAndJarClassPathFactory.create(file, settings) + ZipAndJarClassPathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectoryClassPath(file.file) else diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 9f51672e79a..c6ab18a1e48 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.{URI, URL} import java.nio.file.{FileSystems, Files, SimpleFileVisitor} import java.util.function.IntFunction @@ -25,6 +25,7 @@ import FileUtils._ import scala.collection.JavaConverters._ import scala.collection.immutable import scala.reflect.internal.JDK9Reflectors +import scala.tools.nsc.CloseableRegistry import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} /** @@ -61,6 +62,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { case None => emptyFiles case Some(directory) => listChildren(directory, Some(isPackage)) @@ -137,7 +139,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { + def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None else { @@ -154,7 +156,11 @@ object JrtClassPath { try { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None - else Some(new CtSymClassPath(ctSym, v.toInt)) + else { + val classPath = new CtSymClassPath(ctSym, v.toInt) + closeableRegistry.registerClosable(classPath) + Some(classPath) + } } catch { case _: Throwable => None } @@ -230,7 +236,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No /** * Implementation `ClassPath` based on the $JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths with Closeable { import java.nio.file.Path, java.nio.file._ private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null) @@ -276,7 +282,7 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas def asURLs: Seq[URL] = Nil def 
asClassPathStrings: Seq[String] = Nil - + override def close(): Unit = fileSystem.close() def findClassFile(className: String): Option[AbstractFile] = { if (!className.contains(".")) None else { diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 5b157e9b386..a4ba7cec2c4 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -37,7 +37,6 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi // mimic the behavior of the old nsc.util.DirectoryClassPath def asURLs: Seq[URL] = Seq(new URL(dir.name)) def asClassPathStrings: Seq[String] = Seq(dir.path) - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl def findClassFile(className: String): Option[AbstractFile] = { diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6f8b9a55c0c..31855236150 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -12,15 +12,17 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import java.nio.file.Files import java.nio.file.attribute.{BasicFileAttributes, FileTime} +import java.util.{Timer, TimerTask} +import java.util.concurrent.atomic.AtomicInteger import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ /** @@ -29,18 +31,19 @@ import FileUtils._ * when there are a lot of projects having a lot of common dependencies. */ sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath] - - def create(zipFile: AbstractFile, settings: Settings): ClassPath = { - if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, settings.releaseValue) - else createUsingCache(zipFile, settings) + private val cache = new FileBasedCache[ClassPath with Closeable] + + def create(zipFile: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = { + if (settings.YdisableFlatCpCaching || zipFile.file == null) { + val result: ClassPath with Closeable = createForZipFile(zipFile, settings.releaseValue) + closeableRegistry.registerClosable(result) + result + } else { + cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry) + } } - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath - - private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue)) - } + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable } /** @@ -75,7 +78,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. 
this entry:
   *   Name: scala/Function2$mcFJD$sp.class
   */
-  private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths {
+  private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths with Closeable {
     override def findClassFile(className: String): Option[AbstractFile] = {
       val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className)
       classes(pkg).find(_.name == simpleClassName).map(_.file)
@@ -84,6 +87,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory {

     override def asClassPathStrings: Seq[String] = Seq(file.path)
     override def asURLs: Seq[URL] = file.toURLs()
+    override def close(): Unit = file.close()

     import ManifestResourcesClassPath.PackageFileInfo
     import ManifestResourcesClassPath.PackageInfo
@@ -152,7 +156,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory {
     case class PackageInfo(packageName: String, subpackages: List[AbstractFile])
   }

-  override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath =
+  override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable =
     if (zipFile.file == null) createWithoutUnderlyingFile(zipFile)
     else ZipArchiveClassPath(zipFile.file, release)
@@ -183,15 +187,33 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory {
     override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource
   }

-  override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file)
+  override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file)
 }

 final class FileBasedCache[T] {
   import java.nio.file.Path
   private case class Stamp(lastModified: FileTime, fileKey: Object)
-  private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)]
+  private case class Entry(stamps: Seq[Stamp], t: T) {
+    val referenceCount: AtomicInteger = new AtomicInteger(1)
+    def referenceCountDecrementer: Closeable = new Closeable {
+      var closed = false
+      override def close(): Unit = {
+        if (!closed) {
+          closed = true
+          val count = referenceCount.decrementAndGet()
+          if (count == 0) {
+            t match {
+              case cl: Closeable => FileBasedCache.deferredClose(referenceCount, cl)
+              case _ =>
+            }
+          }
+        }
+      }
+    }
+  }
+  private val cache = collection.mutable.Map.empty[Seq[Path], Entry]

-  def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized {
+  def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry): T = cache.synchronized {
     val stamps = paths.map { path =>
       val attrs = Files.readAttributes(path, classOf[BasicFileAttributes])
       val lastModified = attrs.lastModifiedTime()
@@ -201,10 +223,34 @@ final class FileBasedCache[T] {
     }

     cache.get(paths) match {
-      case Some((cachedStamps, cached)) if cachedStamps == stamps => cached
+      case Some(e@Entry(cachedStamps, cached)) =>
+        if (cachedStamps == stamps) {
+          // Cache hit
+          e.referenceCount.incrementAndGet()
+          closeableRegistry.registerClosable(e.referenceCountDecrementer)
+          cached
+        } else {
+          // Cache miss: we found an entry but the underlying files have been modified
+          cached match {
+            case c: Closeable =>
+              if (e.referenceCount.get() == 0) {
+                c.close()
+              } else {
+                // TODO: What to do here? Maybe add to a list of closeables polled by a cleanup thread?
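+                // One possibility (not implemented here): hand the stale entry to
+                // the deferred-close timer in the companion object below, which
+                // could reap it once its reference count reaches zero.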
+ } + } + val value = create() + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(entry.referenceCountDecrementer) + value + } case _ => + // Cache miss val value = create() - cache.put(paths, (stamps, value)) + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(entry.referenceCountDecrementer) value } } @@ -215,3 +261,26 @@ final class FileBasedCache[T] { cache.clear() } } + +object FileBasedCache { + private val deferCloseMs = Integer.getInteger("scalac.filebasedcache.defer.close.ms", 1000) + private val timer: Option[Timer] = { + if (deferCloseMs > 0) + Some(new java.util.Timer(true)) + else None + } + private def deferredClose(referenceCount: AtomicInteger, closable: Closeable): Unit = { + timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + if (referenceCount.get == 0) + closable.close() + } + } + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + closable.close() + } + } +} diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 32ec4cde448..c658d4c0166 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import scala.collection.Seq import scala.reflect.io.AbstractFile @@ -25,7 +25,7 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation} * It provides common logic for classes handling class and source files. * It's aware of things like e.g. META-INF directory which is correctly skipped. */ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath { +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath with Closeable { val zipFile: File def release: Option[String] @@ -33,8 +33,8 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - private val archive = new FileZipArchive(zipFile, release) + override def close(): Unit = archive.close() override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val prefix = PackageNameUtils.packagePrefix(inPackage) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 768ddf9fd2d..910b2b258d6 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -95,26 +95,7 @@ object Plugin { private val PluginXML = "scalac-plugin.xml" - private val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader]() - - /** Create a class loader with the specified locations plus - * the loader that loaded the Scala compiler. - * - * If the class loader has already been created before and the - * file stamps are the same, the previous loader is returned to - * mitigate the cost of dynamic classloading as it has been - * measured in https://github.com/scala/scala-dev/issues/458. 
- */ - def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = { - def newLoader = () => { - val compilerLoader = classOf[Plugin].getClassLoader - val urls = locations map (_.toURL) - ScalaClassLoader fromURLs (urls, compilerLoader) - } - - if (disableCache || locations.exists(!Jar.isJarOrZip(_))) newLoader() - else pluginClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } + private[nsc] val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader.URLClassLoader]() /** Try to load a plugin description from the specified location. */ diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 119bf075bdd..ac8f301885b 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -18,6 +18,8 @@ import java.net.URL import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path import scala.tools.nsc +import scala.tools.nsc.io.Jar +import scala.tools.nsc.plugins.Plugin.pluginClassLoadersCache import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults @@ -58,8 +60,35 @@ trait Plugins { global: Global => classes map (Plugin.instantiate(_, this)) } + /** + * Locate or create the classloader to load a compiler plugin with `classpath`. + * + * Subclasses may override to customise the behaviour. + * + * @param classpath + * @return + */ protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { - Plugin.loaderFor(classpath, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name) + val disableCache = settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name + def newLoader = () => { + val compilerLoader = classOf[Plugin].getClassLoader + val urls = classpath map (_.toURL) + ScalaClassLoader fromURLs (urls, compilerLoader) + } + + // Create a class loader with the specified locations plus + // the loader that loaded the Scala compiler. + // + // If the class loader has already been created before and the + // file stamps are the same, the previous loader is returned to + // mitigate the cost of dynamic classloading as it has been + // measured in https://github.com/scala/scala-dev/issues/458. 
+ + if (disableCache || classpath.exists(!Jar.isJarOrZip(_))) { + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + } else pluginClassLoadersCache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry) } protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList() @@ -147,7 +176,7 @@ trait Plugins { global: Global => af <- Option(nsc.io.AbstractFile getDirectory file) } yield af.file.toURI.toURL } else global.classPath.asURLs - def newLoader = () => { + def newLoader: () => ScalaClassLoader.URLClassLoader = () => { analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) } @@ -172,7 +201,7 @@ trait Plugins { global: Global => analyzer.macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") perRunCaches.recordClassloader(newLoader()) } else { - Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) + Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader, closeableRegistry) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index f649b924e46..8120019444c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -925,8 +925,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { object Macros { final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]() - + new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader.URLClassLoader]() } trait MacrosStats { diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index 827c7ce5dbd..b2ad49c5a38 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -13,6 +13,8 @@ package scala.tools.nsc package util +import java.io.Closeable + import io.{AbstractFile, Directory, File, Jar} import java.net.MalformedURLException import java.net.URL diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 3abd5f39076..a290c6bfafc 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -14,15 +14,13 @@ package scala.tools package reflect import scala.reflect.internal.util.ScalaClassLoader -import scala.tools.nsc.Driver -import scala.tools.nsc.Global -import scala.tools.nsc.Settings +import scala.tools.nsc.{Driver, Global, CloseableRegistry, Settings} import scala.tools.util.PathResolver object ReflectMain extends Driver { private def classloaderFromSettings(settings: Settings) = { - val classPathURLs = new PathResolver(settings).resultAsURLs + val classPathURLs = new PathResolver(settings, new CloseableRegistry).resultAsURLs ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader) } diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 1ad471e40f8..cf454d5854f 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -15,8 +15,9 @@ package tools package util import java.net.URL + import scala.tools.reflect.WrappedProperties.AccessControl -import scala.tools.nsc.Settings +import 
scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.util.ClassPath import scala.reflect.io.{Directory, File, Path} import PartialFunction.condOpt @@ -189,19 +190,24 @@ object PathResolver { } else { val settings = new Settings() val rest = settings.processArguments(args.toList, processAll = false)._2 - val pr = new PathResolver(settings) - println("COMMAND: 'scala %s'".format(args.mkString(" "))) - println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - - pr.result match { - case cp: AggregateClassPath => - println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + val registry = new CloseableRegistry + try { + val pr = new PathResolver(settings, registry) + println("COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } finally { + registry.close() } } } -final class PathResolver(settings: Settings) { - private val classPathFactory = new ClassPathFactory(settings) +final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry) { + private val classPathFactory = new ClassPathFactory(settings, closeableRegistry) import PathResolver.{ AsLines, Defaults, ppcp } @@ -250,7 +256,7 @@ final class PathResolver(settings: Settings) { // Assemble the elements! def basis = List[Traversable[ClassPath]]( - JrtClassPath.apply(settings.releaseValue), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) + jrt, // 0. The Java 9+ classpath (backed by the ct.sym or jrt:/ virtual system, if available) classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. @@ -261,6 +267,8 @@ final class PathResolver(settings: Settings) { sourcesInPath(sourcePath) // 7. The Scala source path. ) + private def jrt: Option[ClassPath] = JrtClassPath.apply(settings.releaseValue, closeableRegistry) + lazy val containers = basis.flatten.distinct override def toString = s""" diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index b016778bf42..309a6d49c48 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -18,6 +18,7 @@ import scala.tools.asm.tree._ import java.io.{InputStream, File => JFile} import AsmNode._ +import scala.tools.nsc.CloseableRegistry /** * Provides utilities for inspecting bytecode using ASM library. 
@@ -144,7 +145,7 @@ abstract class BytecodeTest { import scala.tools.nsc.Settings // logic inspired by scala.tools.util.PathResolver implementation // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath - val factory = new ClassPathFactory(new Settings()) + val factory = new ClassPathFactory(new Settings(), new CloseableRegistry) val containers = factory.classesInExpandedPath(sys.props("partest.output") + java.io.File.pathSeparator + Defaults.javaUserClassPath) new AggregateClassPath(containers) } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index ee109799f3d..05c591b9d53 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -149,6 +149,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) else ensureDir(dirs, dirName(entry.getName), null) } + def close(): Unit } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { @@ -232,6 +233,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } } finally { if (ZipArchive.closeZipFile) zipFile.close() + else closeables ::= zipFile } root } @@ -259,6 +261,10 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile case _ => false } + private[this] var closeables: List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close) + } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class URLZipArchive(val url: URL) extends ZipArchive(null) { @@ -266,6 +272,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) + closeables ::= in @tailrec def loop() { val zipEntry = in.getNextEntry() @@ -327,6 +334,10 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { case x: URLZipArchive => url == x.url case _ => false } + private[this] var closeables: List[java.io.Closeable] = Nil + def close(): Unit = { + closeables.foreach(_.close()) + } } final class ManifestResources(val url: URL) extends ZipArchive(null) { @@ -334,6 +345,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val manifest = new Manifest(input) + closeables ::= input + val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) for (zipEntry <- iter) { @@ -385,4 +398,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { } } } + private[this] var closeables: List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close()) + } } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 764bb4d4854..73cbc828eea 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -28,6 +28,7 @@ import scala.tools.nsc.util._ import ScalaClassLoader.URLClassLoader 
import scala.tools.nsc.util.Exceptional.unwrap import java.net.URL +import java.io.Closeable import scala.tools.util.PathResolver import scala.util.{Try => Trying} @@ -63,7 +64,7 @@ import scala.util.{Try => Trying} * @author Moez A. Abdel-Gawad * @author Lex Spoon */ -class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation { +class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation with Closeable { imain => def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut) @@ -100,7 +101,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def compilerClasspath: Seq[java.net.URL] = ( if (isInitializeComplete) global.classPath.asURLs - else new PathResolver(settings).resultAsURLs // the compiler's classpath + else new PathResolver(settings, global.closeableRegistry).resultAsURLs // the compiler's classpath ) def settings = initialSettings // Run the code body with the given boolean settings flipped to true. @@ -683,6 +684,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def close() { reporter.flush() + if (isInitializeComplete) { + global.close() + } } /** Here is where we: diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 4c7f05318c5..296cc5a4b1d 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -16,7 +16,7 @@ import scala.reflect.internal.util.RangePosition import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.{interactive, Settings} +import scala.tools.nsc.{interactive, CloseableRegistry, Settings} import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.classpath._ @@ -63,10 +63,6 @@ trait PresentationCompilation { * You may downcast the `reporter` to `StoreReporter` to access type errors. 
*/ def newPresentationCompiler(): interactive.Global = { - def mergedFlatClasspath = { - val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings) - AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) - } def copySettings: Settings = { val s = new Settings(_ => () /* ignores "bad option -nc" errors, etc */) s.processArguments(global.settings.recreateArgs, processAll = false) @@ -75,6 +71,11 @@ trait PresentationCompilation { } val storeReporter: StoreReporter = new StoreReporter val interactiveGlobal = new interactive.Global(copySettings, storeReporter) { self => + def mergedFlatClasspath = { + val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings, closeableRegistry) + AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) + } + override lazy val platform: ThisPlatform = { new JavaPlatform { lazy val global: self.type = self diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 42490c604e1..72b5a7424ce 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -42,7 +42,7 @@ trait ReplGlobal extends Global { case None => base case Some(out) => // Make bytecode of previous lines available to the inliner - val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings) + val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings, closeableRegistry) AggregateClassPath.createAggregate(platform.classPath, replOutClasspath) } } diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index 42b0fdfb236..5e3d633d429 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -14,8 +14,9 @@ package scala package tools.scalap import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintStream} + import scala.reflect.NameTransformer -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory} import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver @@ -185,14 +186,18 @@ object Main extends Main { settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching settings.Ylogcp.value = arguments contains opts.logClassPath - val path = createClassPath(cpArg, settings) - - // print the classpath if output is verbose - if (verbose) - Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) - - // process all given classes - arguments.getOthers foreach process(arguments, path) + val registry = new CloseableRegistry + try { + val path = createClassPath(cpArg, settings, registry) + // print the classpath if output is verbose + if (verbose) + Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) + + // process all given classes + arguments.getOthers foreach process(arguments, path) + } finally { + registry.close() + } } private def parseArguments(args: Array[String]) = @@ -208,11 +213,11 @@ object Main extends Main { .withOption(opts.logClassPath) .parse(args) - private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match { + private def createClassPath(cpArg: Option[String], settings: Settings, closeableRegistry: CloseableRegistry) = cpArg match { case Some(cp) => - 
AggregateClassPath(new ClassPathFactory(settings).classesInExpandedPath(cp)) + AggregateClassPath(new ClassPathFactory(settings, closeableRegistry).classesInExpandedPath(cp)) case _ => settings.classpath.value = "." // include '.' in the default classpath scala/bug#6669 - new PathResolver(settings).result + new PathResolver(settings, closeableRegistry).result } } diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala index 0e868bd2872..50037970609 100644 --- a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala +++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala @@ -48,6 +48,7 @@ class GlobalCustomizeClassloaderTest { case Typed(Literal(Constant(s: String)), _) => Assert.assertEquals(SampleMacro.data, s) case _ => Assert.fail() } + g.close() } } diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala index b46677d6d47..fdc2b9caae6 100644 --- a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala +++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver @@ -19,14 +19,15 @@ class JrtClassPathTest { @Test def lookupJavaClasses(): Unit = { val specVersion = scala.util.Properties.javaSpecVersion // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on. + val closeableRegistry = new CloseableRegistry val cp: ClassPath = if (specVersion == "" || specVersion == "1.8") { val settings = new Settings() - val resolver = new PathResolver(settings) - val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath) + val resolver = new PathResolver(settings, closeableRegistry) + val elements = new ClassPathFactory(settings, closeableRegistry).classesInPath(resolver.Calculated.javaBootClassPath) AggregateClassPath(elements) } - else JrtClassPath(None).get + else JrtClassPath(None, closeableRegistry).get assertEquals(Nil, cp.classes("")) assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang")) @@ -37,5 +38,7 @@ class JrtClassPathTest { assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object")) assertTrue(cp.findClass("java.lang.Object").isDefined) assertTrue(cp.findClassFile("java.lang.Object").isDefined) + + closeableRegistry.close() } } diff --git a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala index d3d4289d8b9..e8025ec69ec 100644 --- a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala +++ b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala @@ -4,13 +4,15 @@ package scala.tools.nsc.classpath import java.io.File + import org.junit.Assert._ import org.junit._ import org.junit.rules.TemporaryFolder import org.junit.runner.RunWith import org.junit.runners.JUnit4 + import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.util.PathResolver @RunWith(classOf[JUnit4]) @@ -57,7 +59,7 @@ class PathResolverBaseTest { def deleteTempDir: Unit = tempDir.delete() private def 
createFlatClassPath(settings: Settings) =
-    new PathResolver(settings).result
+    new PathResolver(settings, new CloseableRegistry).result
 
   @Test
   def testEntriesFromListOperationAgainstSeparateMethods: Unit = {
diff --git a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
index f49f04d2c56..b58effbcfa3 100644
--- a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
+++ b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala
@@ -14,7 +14,8 @@ class ZipAndJarFileLookupFactoryTest {
     Files.delete(f)
     val g = new scala.tools.nsc.Global(new scala.tools.nsc.Settings())
     assert(!g.settings.YdisableFlatCpCaching.value) // we're testing with our JAR metadata caching enabled.
-    def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings)
+    val closeableRegistry = new CloseableRegistry
+    def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings, closeableRegistry)
     try {
       createZip(f, Array(), "p1/C.class")
       createZip(f, Array(), "p2/X.class")
@@ -41,7 +42,10 @@ class ZipAndJarFileLookupFactoryTest {
       // And that instance should see D, not C, in package p1.
       assert(cp3.findClass("p1.C").isEmpty)
       assert(cp3.findClass("p1.D").isDefined)
-    } finally Files.delete(f)
+    } finally {
+      Files.delete(f)
+      closeableRegistry.close()
+    }
   }
 
   def createZip(zipLocation: Path, content: Array[Byte], internalPath: String): Unit = {
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index cbd5634f292..e2b11cfecd2 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -36,7 +36,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
 
   def platformPhases: List[SubComponent] = Nil
 
-  private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result
+  private[nsc] lazy val classPath: ClassPath = new PathResolver(settings, new CloseableRegistry).result
 
   def isMaybeBoxed(sym: Symbol): Boolean = ???
   def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???

From d904ee7399cb51cb3c462346671457eef1353ea2 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 29 Oct 2018 11:11:40 +1000
Subject: [PATCH 08/66] Avoid unexplained diverging implicit error with Ordering.ordered

---
 src/repl/scala/tools/nsc/interpreter/ILoop.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index a32e2aa02ee..7e4ac310c4d 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -363,7 +363,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend
   }
 
   private def changeSettings(line: String): Result = {
-    def showSettings() = for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
+    val settings1 = settings
+    def showSettings() = for (s <- settings1.userSetSettings.toSeq.sorted) echo(s.toString)
     if (line.isEmpty) showSettings() else { updateSettings(line) ; () }
   }
   private def updateSettings(line: String) = {

From 3e5200032fd1c39ddbdaab81b2b230b353c3894d Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 13 Aug 2018 12:54:42 +1000
Subject: [PATCH 09/66] Classpath infrastructure to support pipelining, caching.
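
PipelineMain compiles a group of projects, each described by a scalac
args file, on a shared thread pool, and supports several build
strategies: traditional batch compilation, pickle-based pipelining,
and outline typing (via the new -Youtline flag). It computes the
per-project critical path and emits a Chrome trace of the build.

Downstream jobs can start compiling against the in-memory pickles of
their upstream dependencies before any classfiles have been written.
This is enabled by a new ClassPathPlugin extension point on Platform,
which can rewrite the classpath and intercept how classfile contents
are read. A rough sketch of registering such a plugin (illustrative
only: `register` and `cache` are hypothetical names; the plugin API
itself is the one introduced in this patch):

```scala
import scala.tools.nsc.Global
import scala.tools.nsc.backend.ClassfileInfo
import scala.tools.nsc.io.AbstractFile

// Hypothetical helper: wire a cache-backed plugin into a Global.
def register(g: Global, cache: collection.concurrent.Map[AbstractFile, ClassfileInfo]): Unit = {
  val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin {
    // Serve previously recorded metadata instead of re-reading the classfile.
    override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] =
      cache.get(file)
    // Remember what the classfile parser learned on first contact.
    override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit =
      cache.update(file, info)
  }
  g.platform.addClassPathPlugin(plugin)
}
```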
--- .../scala/tools/nsc/PipelineMain.scala | 456 ++++++++++++++++++ .../tools/nsc/backend/JavaPlatform.scala | 2 +- .../scala/tools/nsc/backend/Platform.scala | 55 +++ .../classpath/VirtualDirectoryClassPath.scala | 2 +- .../tools/nsc/settings/AbsSettings.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../tools/nsc/symtab/SymbolLoaders.scala | 2 + .../symtab/classfile/AbstractFileReader.scala | 7 +- .../symtab/classfile/ClassfileParser.scala | 68 ++- .../tools/nsc/typechecker/Analyzer.scala | 12 +- .../scala/tools/nsc/typechecker/Typers.scala | 5 +- .../scala/reflect/internal/Symbols.scala | 2 +- .../reflect/internal/pickling/UnPickler.scala | 3 + .../tools/nsc/classpath/ClassPluginTest.scala | 60 +++ .../symtab/SymbolTableForUnitTesting.scala | 2 +- 15 files changed, 655 insertions(+), 24 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/PipelineMain.scala create mode 100644 test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala new file mode 100644 index 00000000000..5d211323504 --- /dev/null +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -0,0 +1,456 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc + +import java.io.File +import java.nio.ByteBuffer +import java.nio.file.{Files, Path, Paths} +import java.util.Collections + +import javax.tools.{SimpleJavaFileObject, ToolProvider} + +import scala.collection.JavaConverters.asJavaIterableConverter +import scala.collection.{immutable, mutable, parallel} +import scala.concurrent.{Await, ExecutionContext, Future, Promise} +import scala.reflect.internal.pickling.PickleBuffer +import scala.reflect.internal.util.FakePos +import scala.reflect.io.{VirtualDirectory, VirtualFile} +import scala.tools.nsc.backend.{ClassfileInfo, JavaPlatform, ScalaClass, ScalaRawClass} +import scala.tools.nsc.classpath.{DirectoryClassPath, VirtualDirectoryClassPath} +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.util.ClassPath +import scala.util.{Failure, Success} +import scala.concurrent.duration.Duration + +class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy) { + /** Forward errors to the (current) reporter. 
*/ + protected def scalacError(msg: String): Unit = { + reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") + } + + private var reporter: Reporter = _ + + implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism)) + val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + + private class PickleClassPath[G <: Global](data: mutable.AnyRefMap[G#Symbol, PickleBuffer]) { + val dir = new VirtualDirectory("fakes", None) + val classpath = VirtualDirectoryClassPath(dir) + val dirs = mutable.Map[G#Symbol, AbstractFile]() + val classInfo = mutable.Map[AbstractFile, ClassfileInfo]() + def packageDir(packSymbol: G#Symbol): AbstractFile = { + if (packSymbol.isEmptyPackageClass) dir + else if (dirs.contains(packSymbol)) dirs(packSymbol) + else if (packSymbol.owner.isRoot) { + val subDir = dir.subdirectoryNamed(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } else { + val base = packageDir(packSymbol.owner) + val subDir = base.subdirectoryNamed(packSymbol.encodedName) + dirs.put(packSymbol, subDir) + subDir + } + } + for ((symbol, pickle) <- data) { + val base = packageDir(symbol.owner) + if (symbol.isClass) { + val primary = base.fileNamed(symbol.encodedName + ".class") + classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + if (symbol.companionModule.exists) { + val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") + classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) + } + } else if (symbol.isModule) { + if (symbol.companionClass.exists) { + val primary = base.fileNamed(symbol.encodedName + ".class") + classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") + classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) + } else { + val primary = base.fileNamed(symbol.encodedName + "$.class") + classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + } + } + } + } + private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, PickleClassPath[_]] + private val allParsedInfos = new java.util.concurrent.ConcurrentHashMap[AbstractFile, ClassfileInfo] + + def process(args: Array[String]): Boolean = { + println(s"parallelism = $parallelism, strategy = $strategy") + + reporter = new ConsoleReporter(new Settings(scalacError)) + + def commandFor(argFileArg: String): Task = { + val ss = new Settings(scalacError) + val command = new CompilerCommand(("@" + argFileArg) :: Nil, ss) + Task(argFileArg, command, command.files) + } + val projects: List[Task] = args.toList.map(commandFor) + val produces = mutable.HashMap[Path, Task]() + for (p <- projects) { + val outputDir = p.command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() + produces(outputDir) = p + } + val dependsOn = mutable.HashMap[Task, List[Task]]() + for (p <- projects) { + val value: Seq[String] = ClassPath.expandPath(p.command.settings.classpath.value, expandStar = true) + dependsOn(p) = value.flatMap(s => produces.get(Paths.get(s).toAbsolutePath.normalize())).toList.filterNot(_ == p) + } + val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.toSet + val timer = new Timer + timer.start() + strategy match { + case OutlineTypeOnly => + val futures = projects.map { p => + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, 
Nil).map(_.outlineDone.future)) + p.shouldOutlineType = true + f1.map { _ => + p.outlineCompile() + p.javaCompile() + } + } + + val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.outlineDone.future :: Nil) )) + Await.result(toWait, Duration.Inf) + timer.stop() + + for (p <- projects) { + val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case OutlineTypePipeline => + val futures = projects.map { p => + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) + val shouldOutlineType = dependedOn(p) + p.shouldOutlineType = shouldOutlineType + f1.map { _ => + if (p.shouldOutlineType) { + p.outlineCompile() + } else { + p.fullCompile() + } + } + } + projects.map { + p => + if (p.shouldOutlineType) p.outlineDone.future.onComplete { _ => + p.fullCompile() + } + Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + } + val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) + Await.result(toWait, Duration.Inf) + timer.stop() + + for (p <- projects) { + val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Pipeline => + val futures: immutable.Seq[Future[Unit]] = projects.map { p => + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) + f1.map { _ => p.fullCompileExportPickles(); p.javaCompile() } + } + val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) + Await.result(toWait, Duration.Inf) + timer.stop() + + for (p <- projects) { + val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Traditional => + val futures = projects.map { p => + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.javaDone.future)) + val shouldOutlineType = dependedOn(p) + p.shouldOutlineType = shouldOutlineType + f1.flatMap { _ => + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + } + } + val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) + Await.result(toWait, Duration.Inf) + timer.stop() + + for (p <- projects) { + val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + if (parallelism == 1) { + val maxFullCriticalPath: Double = projects.map(_.fullCriticalPathMs).max + println(f"Critical path: $maxFullCriticalPath%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") + } else { + println(f"Wall Clock: ${timer.durationMs}%.0f ms") + } + } + + val trace = new java.lang.StringBuilder() + trace.append("""{"traceEvents": [""") + val sb = new mutable.StringBuilder(trace) + def durationEvent(name: String, cat: String, t: Timer): String = { + s"""{"name": "$name", "cat": "$cat", "ph": "X", "ts": ${(t.startMicros).toLong}, "dur": ${(t.durationMicros).toLong}, "pid": 0, "tid": ${t.thread.getId}}""" + } + def projectEvents(p: Task): List[String] = { + val events = List.newBuilder[String] + if (p.outlineTimer.durationMicros > 0d) { + events += durationEvent(p.label, "outline-type", p.outlineTimer) + } + for ((g, ix) <- p.groups.zipWithIndex) { + if (g.timer.durationMicros > 0d) + events += durationEvent(p.label, "compile-" + ix, g.timer) + } + events.result() + } + projects.iterator.flatMap(projectEvents).addString(sb, ",\n") + trace.append("]}") + Files.write(Paths.get(s"build-${label}.trace"), trace.toString.getBytes()) + true + } + + case class Group(files: List[String]) { + val timer = new Timer + val done = Promise[Unit]() + } + private case class Task(argsFile: String, command: CompilerCommand, files: List[String]) { + val label = argsFile.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + override def toString: String = argsFile + + command.settings.YcacheMacroClassLoader.value = "none" + + val groups: List[Group] = { + val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) + if (strategy != OutlineTypePipeline || isScalaLibrary) { + Group(files) :: Nil + } else { + command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value + val length = files.length + val groups = (length.toDouble / 128).toInt.max(1) + files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList.map(Group(_)) + } + } + command.settings.outputDirs.getSingleOutput.get.file.mkdirs() + + val isGrouped = groups.size > 1 + + val outlineTimer = new Timer() + + var shouldOutlineType = true + var outlineCriticalPathMs = 0d + var regularCriticalPathMs = 0d + var fullCriticalPathMs = 0d + val outlineDone = Promise[Unit]() + val javaDone = Promise[Unit]() + + lazy val compiler: Global = { + val result = newCompiler(command.settings) + val reporter = result.reporter + if (reporter.hasErrors) + reporter.flush() + else if (command.shouldStopWithInfo) + reporter.echo(command.getInfoMessage(compiler)) + result + } + + def outlineCompile(): Unit = { + outlineTimer.start() + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + allPickleData.put(command.settings.outputDirs.getSingleOutput.get.file.toPath.toRealPath().normalize(), new PickleClassPath(run1.symData)) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) + outlineDone.complete(Failure(new RuntimeException("compile failed"))) + else + outlineDone.complete(Success(())) + } + + def fullCompile(): Unit = { + command.settings.Youtline.value = false + command.settings.stopAfter.value = Nil + command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal + + for (group <- groups) { + group.done.completeWith { + Future { + val compiler2 = newCompiler(command.settings) + val run2 = new compiler2.Run() + group.timer.start() + run2 compile group.files + compiler2.reporter.finish() + group.timer.stop() + if 
(compiler2.reporter.hasErrors) { + group.done.complete(Failure(new RuntimeException("Compile failed"))) + } else { + group.done.complete(Success(())) + } + } + } + } + } + + def fullCompileExportPickles(): Unit = { + assert(groups.size == 1) + val group = groups.head + outlineTimer.start() + val run2 = new compiler.Run() { + override def advancePhase(): Unit = { + if (compiler.phase == this.picklerPhase) { + allPickleData.put(command.settings.outputDirs.getSingleOutput.get.file.toPath.toRealPath().normalize(), new PickleClassPath(symData)) + outlineTimer.stop() + outlineDone.complete(Success(())) + group.timer.start() + } + super.advancePhase() + } + } + + run2 compile group.files + compiler.reporter.finish() + group.timer.stop() + if (compiler.reporter.hasErrors) { + group.done.complete(Failure(new RuntimeException("Compile failed"))) + } else { + group.done.complete(Success(())) + } + } + + def javaCompile(): Unit = { + val javaSources = files.filter(_.endsWith(".java")) + if (javaSources.nonEmpty) { + javaDone.completeWith(Future { + val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + command.settings.classpath.value) + val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) + compileTask.setProcessors(Collections.emptyList()) + compileTask.call() + }) + } else { + javaDone.complete(Success(())) + } + } + } + + final class Timer() { + private var startNanos: Long = 0 + private var endNanos: Long = 0 + def start(): Unit = { + assert(startNanos == 0L) + startNanos = System.nanoTime + } + var thread: Thread = Thread.currentThread() + def stop(): Unit = { + thread = Thread.currentThread() + endNanos = System.nanoTime() + } + def startMs: Double = startNanos.toDouble / 1000 / 1000 + def durationMs: Double = (endNanos - startNanos).toDouble / 1000 / 1000 + def startMicros: Double = startNanos.toDouble / 1000d + def durationMicros: Double = (endNanos - startNanos).toDouble / 1000d + } + + protected def newCompiler(settings: Settings): Global = { + val g = Global(settings) + + val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { + val replacements = mutable.Buffer[PickleClassPath[_]]() + override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { + classPath.flatMap { + case dcp: DirectoryClassPath => + val path = dcp.dir.toPath.toRealPath().normalize() + allPickleData.get(path) match { + case null => + dcp :: Nil + case pcp => + replacements += pcp + pcp.classpath :: dcp :: Nil // leaving the original classpath for Java compiled files for now + } + case cp => cp :: Nil + } + } + + override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { + file match { + case vf: VirtualFile => + val iterator = replacements.iterator.flatMap(_.classInfo.get(vf)) + if (iterator.hasNext) + return Some(iterator.next()) + else None + case _ => None + } + allParsedInfos.get(file) match { + case null => None + case info => Some(info) + } + } + override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { + allParsedInfos.put(file, info) + } + } + g.platform.addClassPathPlugin(plugin) + g + } + +} + +sealed abstract class BuildStrategy +case object OutlineTypeOnly extends BuildStrategy +/** Outline type check to compute type signatures as pickles as an input to downstream compilation. 
*/ +case object OutlineTypePipeline extends BuildStrategy +case object Pipeline extends BuildStrategy +/** Emit class files before triggering downstream compilation */ +case object Traditional extends BuildStrategy + +object PipelineMain { + def main(args: Array[String]): Unit = { + var i = 0 + //for (_ <- 1 to 10; n <- List(parallel.availableProcessors, 1); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { + for (_ <- 1 to 20; n <- List(parallel.availableProcessors); strat <- List(OutlineTypeOnly)) { + i += 1 + val main = new PipelineMainClass(i.toString, n, strat) + println(s"====== ITERATION $i=======") + val result = main.process(args) + if (!result) + System.exit(1) + } + System.exit(0) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index ff11f434710..4d1bd6d8ece 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -27,7 +27,7 @@ trait JavaPlatform extends Platform { private[nsc] var currentClassPath: Option[ClassPath] = None private[nsc] def classPath: ClassPath = { - if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) + if (currentClassPath.isEmpty) currentClassPath = Some(applyClassPathPlugins(new PathResolver(settings).result)) currentClassPath.get } diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index a69e79d4c4f..13d61d3cc39 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -13,7 +13,10 @@ package scala.tools.nsc package backend +import java.nio.ByteBuffer + import io.AbstractFile +import scala.tools.nsc.classpath.AggregateClassPath import scala.tools.nsc.util.ClassPath /** The platform dependent pieces of Global. @@ -44,5 +47,57 @@ trait Platform { * a re-compile is triggered. On .NET by contrast classfiles always take precedence. */ def needCompile(bin: AbstractFile, src: AbstractFile): Boolean + + /** + * A class path plugin can modify the classpath before it is used by the compiler, and can + * customize the way that the compiler reads the contents of class files. + * + * Applications could include: + * + * - Caching the ScalaSignature annotation contents, to avoid the cost of decompressing + * and parsing the classfile, akin to the OpenJDK's .sig format for stripped class files. 
+   *  - Starting a downstream compilation job immediately after the upstream job has completed
+   *    the pickler phase ("Build Pipelining")
+   */
+  abstract class ClassPathPlugin {
+    def info(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo]
+    def parsed(file: AbstractFile, clazz: ClassSymbol, info: ClassfileInfo): Unit = ()
+    def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = classPath
+  }
+
+  /** A list of registered classpath plugins */
+  private var classPathPlugins: List[ClassPathPlugin] = Nil
+
+  protected final def applyClassPathPlugins(original: ClassPath): ClassPath = {
+    val entries = original match {
+      case AggregateClassPath(entries) => entries
+      case single => single :: Nil
+    }
+    val entries1 = classPathPlugins.foldLeft(entries) {
+      (entries, plugin) => plugin.modifyClassPath(entries)
+    }
+    AggregateClassPath(entries1)
+  }
+
+
+  /** Registers a new classpath plugin */
+  final def addClassPathPlugin(plugin: ClassPathPlugin): Unit = {
+    if (!classPathPlugins.contains(plugin))
+      classPathPlugins = plugin :: classPathPlugins
+  }
+  final def classFileInfo(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] = if (classPathPlugins eq Nil) None else {
+    classPathPlugins.foldLeft(Option.empty[ClassfileInfo]) {
+      case (Some(info), _) => Some(info)
+      case (None, plugin) => plugin.info(file, clazz)
+    }
+  }
+  final def classFileInfoParsed(file: AbstractFile, clazz: ClassSymbol, info: ClassfileInfo): Unit = if (classPathPlugins eq Nil) None else {
+    classPathPlugins.foreach(_.parsed(file, clazz, info))
+  }
 }
+sealed abstract class ClassfileInfo {}
+final case class ClassBytes(data: ByteBuffer) extends ClassfileInfo
+final case class ScalaRawClass(className: String) extends ClassfileInfo
+final case class ScalaClass(className: String, pickle: ByteBuffer) extends ClassfileInfo
+
diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
index 5b157e9b386..04ddc61b210 100644
--- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala
@@ -35,7 +35,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi
   def isPackage(f: AbstractFile): Boolean = f.isPackage
 
   // mimic the behavior of the old nsc.util.DirectoryClassPath
-  def asURLs: Seq[URL] = Seq(new URL(dir.name))
+  def asURLs: Seq[URL] = Seq(new URL("file://_VIRTUAL_/" + dir.name))
   def asClassPathStrings: Seq[String] = Seq(dir.path)
 
   override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index 64eeb8717a9..500f4bea606 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -25,7 +25,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
   protected def allSettings: scala.collection.Set[Setting]
 
   // settings minus internal usage settings
-  def visibleSettings = allSettings filterNot (_.isInternalOnly)
+  def visibleSettings = allSettings.iterator filterNot (_.isInternalOnly)
 
   // only settings which differ from default
   def userSetSettings = visibleSettings filterNot (_.isDefault)
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 
655c3528d18..9dbb2699a11 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -243,6 +243,7 @@ trait ScalaSettings extends AbsScalaSettings
   val YcacheMacroClassLoader = CachePolicy.setting("macro", "macros")
   val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference")
   val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization")
+  val Youtline = BooleanSetting ("-Youtline", "Don't compile method bodies. Use together with `-Ystop-after:pickler` to generate the pickled signatures for all source files.")
 
   val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
   val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 6444823efce..a907a33c421 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -277,6 +277,8 @@ abstract class SymbolLoaders {
 
       val classPathEntries = classPath.list(packageName)
 
+      if (root.name.string_==("immutable"))
+        getClass
       if (!root.isRoot)
         for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry)
       if (!root.isEmptyPackageClass) {
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index a8d673663e8..19be00dd686 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -25,11 +25,8 @@ import scala.tools.nsc.io.AbstractFile
  * @author Philippe Altherr
  * @version 1.0, 23/03/2004
  */
-class AbstractFileReader(val file: AbstractFile) {
-
-  /** the buffer containing the file
-   */
-  val buf: Array[Byte] = file.toByteArray
+class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) {
+  def this(file: AbstractFile) = this(file, file.toByteArray)
 
   /** the current input pointer
    */
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 81f8dfe4454..1a2c6019061 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -15,16 +15,18 @@ package tools.nsc
 package symtab
 package classfile
 
-import java.io.{ByteArrayInputStream, DataInputStream, File, IOException}
+import java.io._
 import java.lang.Integer.toHexString
+import java.nio.ByteBuffer
 
 import scala.collection.{immutable, mutable}
 import scala.collection.mutable.{ArrayBuffer, ListBuffer}
 import scala.annotation.switch
 import scala.reflect.internal.JavaAccFlags
 import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer}
-import scala.reflect.io.NoAbstractFile
+import scala.reflect.io.{NoAbstractFile, VirtualFile}
 import scala.reflect.internal.util.Collections._
+import scala.tools.nsc.backend.{ClassBytes, ScalaClass, ScalaRawClass}
 import scala.tools.nsc.util.ClassPath
 import scala.tools.nsc.io.AbstractFile
 import scala.util.control.NonFatal
@@ -152,14 +154,56 @@ abstract class ClassfileParser {
   def parse(file: AbstractFile, clazz: 
ClassSymbol, module: ModuleSymbol): Unit = {
     this.file = file
     pushBusy(clazz) {
-      this.in = new AbstractFileReader(file)
       this.clazz = clazz
       this.staticModule = module
       this.isScala = false
 
-      parseHeader()
-      this.pool = newConstantPool
-      parseClass()
+      import loaders.platform._
+      classFileInfo(file, clazz) match {
+        case Some(info) =>
+          info match {
+            case ScalaRawClass(className) =>
+              isScalaRaw = true
+              currentClass = TermName(className)
+            case ScalaClass(className, pickle) =>
+              val pickle1 = pickle
+              isScala = true
+              currentClass = TermName(className)
+              if (pickle1.hasArray) {
+                unpickler.unpickle(pickle1.array, pickle1.arrayOffset + pickle1.position(), clazz, staticModule, file.name)
+              } else {
+                val array = new Array[Byte](pickle1.remaining)
+                pickle1.get(array)
+                unpickler.unpickle(array, 0, clazz, staticModule, file.name)
+              }
+            case ClassBytes(data) =>
+              val data1 = data.duplicate()
+              val array = new Array[Byte](data1.remaining)
+              data1.get(array)
+              this.in = new AbstractFileReader(file, array)
+              parseHeader()
+              this.pool = newConstantPool
+              parseClass()
+          }
+        case None =>
+          this.in = new AbstractFileReader(file)
+          parseHeader()
+          this.pool = newConstantPool
+          parseClass()
+          if (!(isScala || isScalaRaw))
+            loaders.platform.classFileInfoParsed(file, clazz, ClassBytes(ByteBuffer.wrap(in.buf)))
+      }
+      if (isScalaRaw && !isNothingOrNull) {
+        unlinkRaw()
+      }
     }
   }
 
+  private def unlinkRaw(): Unit = {
+    val decls = clazz.enclosingPackage.info.decls
+    for (c <- List(clazz, staticModule, staticModule.moduleClass)) {
+      c.setInfo(NoType)
+      decls.unlink(c)
+    }
+  }
+
@@ -441,6 +485,15 @@
     lookupClass(name)
   }
 
+  // TODO: remove after the next 2.13 milestone
+  // A bug in the backend caused classes ending in `$` to get only a Scala marker attribute
+  // instead of a ScalaSig and a Signature annotation. This went unnoticed because isScalaRaw
+  // classes were parsed like Java classes. The below covers the cases in the std lib.
+ private def isNothingOrNull = { + val n = clazz.fullName.toString + n == "scala.runtime.Nothing$" || n == "scala.runtime.Null$" + } + def parseClass() { val jflags = readClassFlags() val sflags = jflags.toScalaFlags @@ -890,8 +943,8 @@ abstract class ClassfileParser { case Some(san: AnnotationInfo) => val bytes = san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes - unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name) + loaders.platform.classFileInfoParsed(file, clazz, ScalaClass(this.currentClass.toString, ByteBuffer.wrap(bytes))) case None => throw new RuntimeException("Scala class file does not contain Scala annotation") } @@ -1216,6 +1269,7 @@ abstract class ClassfileParser { in.skip(attrLen) case tpnme.ScalaATTR => isScalaRaw = true + loaders.platform.classFileInfoParsed(file, clazz, ScalaRawClass(this.currentClass.toString)) case tpnme.InnerClassesATTR if !isScala => val entries = u2 for (i <- 0 until entries) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b068e43d1ad..bc5ffd0ccd7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,11 +112,13 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) + if (!settings.Youtline.value) { + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) + } } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index acac49cff07..02712851471 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2057,8 +2057,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // use typedValDef instead. this version is called after creating a new context for the ValDef - private def typedValDefImpl(vdef: ValDef) = { + private def typedValDefImpl(vdef: ValDef): ValDef = { val sym = vdef.symbol.initialize + val typedMods = if (nme.isLocalName(sym.name) && sym.isPrivateThis && !vdef.mods.isPrivateLocal) { // scala/bug#10009 This tree has been given a field symbol by `enterGetterSetter`, patch up the // modifiers accordingly so that we can survive resetAttrs and retypechecking. 
@@ -5845,7 +5846,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => typed(tree, mode, pt) + case _ => if (settings.Youtline.value) EmptyTree else typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3a25d830a20..b8c54bfe293 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1978,7 +1978,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => var alts0: List[Symbol] = alternatives var alts1: List[Symbol] = Nil - while (alts0.nonEmpty) { + while (!alts0.isEmpty) { if (cond(alts0.head)) alts1 ::= alts0.head else diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 7b82aa3e9f2..c1fc858cef1 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -249,6 +249,9 @@ abstract class UnPickler { else NoSymbol } + if (owner == definitions.ScalaPackageClass && name == tpnme.AnyRef) + return definitions.AnyRefClass + // (1) Try name. localDummy orElse fromName(name) orElse { // (2) Try with expanded name. Can happen if references to private diff --git a/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala b/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala new file mode 100644 index 00000000000..514294e8e8c --- /dev/null +++ b/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2018 Lightbend. All rights reserved. + */ +package scala.tools.nsc.classpath + +import java.nio.ByteBuffer + +import org.junit.Assert.assertEquals +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.reflect.io.VirtualDirectory +import scala.tools.nsc.backend.{ClassfileInfo, ScalaClass} +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.symtab.SymbolTableForUnitTesting +import scala.tools.nsc.util.ClassPath +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting.makeSourceFile + +@RunWith(classOf[JUnit4]) +class ClassPluginTest extends BytecodeTesting { + // We use this.compiler to generate Scala pickles... + override def compilerArgs = "-Ystop-after:pickler" + + // ... 
and this one to read them with a ClassPathPlugin + object symbolTable extends SymbolTableForUnitTesting { + val fakeClasses = Map( + "fake.C" -> ScalaClass("fake.C", pickleOf("package fake; class C { def foo = 42 }")) + ) + private val fakes = new VirtualDirectory("fakes", None) + fakes.subdirectoryNamed("fake").fileNamed("C.class") + + lazy val classpathPlugin = new platform.ClassPathPlugin { + override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { + // Add a classpath entry with the fake/C.class + VirtualDirectoryClassPath(fakes) +: classPath + } + + override def info(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] = + fakeClasses.get(clazz.fullNameString) + } + this.platform.addClassPathPlugin(classpathPlugin) + } + + @Test def classPathPluginTest(): Unit = { + import symbolTable._ + val CClass = rootMirror.getRequiredClass("fake.C") + val C_tpe = CClass.info + assertEquals("def foo: Int", definitions.fullyInitializeSymbol(C_tpe.decl(TermName("foo"))).defString) + } + + private def pickleOf(code: String): ByteBuffer = { + import compiler._ + val run = newRun + run.compileSources(makeSourceFile(code, "unitTestSource.scala") :: Nil) + val pickle = run.symData.toList.head._2 + ByteBuffer.wrap(pickle.bytes, 0, pickle.writeIndex) + } +} diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index cbd5634f292..bbbc0ec4d88 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -36,7 +36,7 @@ class SymbolTableForUnitTesting extends SymbolTable { def platformPhases: List[SubComponent] = Nil - private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result + private[nsc] lazy val classPath: ClassPath = applyClassPathPlugins(new PathResolver(settings).result) def isMaybeBoxed(sym: Symbol): Boolean = ??? def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ??? 
From 730a0c7d2e32da91f0bc56e62e42502ea5a18e3b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Oct 2018 10:45:02 +1000 Subject: [PATCH 10/66] Replace sbt-header with a shim (cherry picked from commit a5869ce9da69368a93fe237ca7084a6b90de4025) --- project/headershim.scala | 34 ++++++++++++++++++++++++++++++++++ project/plugins.sbt | 2 -- 2 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 project/headershim.scala diff --git a/project/headershim.scala b/project/headershim.scala new file mode 100644 index 00000000000..0afa9d1be05 --- /dev/null +++ b/project/headershim.scala @@ -0,0 +1,34 @@ +package de.heikoseeberger.sbtheader + +import sbt._ +import sbt.Keys._ +import sbt.plugins.JvmPlugin + +object HeaderPlugin extends AutoPlugin { + + final object autoImport { + class License + object HeaderLicense { + case class Custom(s: String) extends License + } + val headerLicense: SettingKey[Option[License]] = settingKey[Option[License]]("header License") + + val headerSources = taskKey[scala.collection.Seq[File]]("Sources which need headers checked or created.") + + val headerResources = taskKey[scala.collection.Seq[File]]("Resources which need headers checked or created.") + + def headerSettings(configurations: Configuration*): Seq[Setting[_]] = + configurations.foldLeft(List.empty[Setting[_]]) { _ ++ inConfig(_)(toBeScopedSettings) } + } + + import autoImport._ + + override def trigger = allRequirements + + override def requires = JvmPlugin + + override def projectSettings = headerSettings(Compile, Test) + + private def toBeScopedSettings = Vector(headerSources := Nil, headerResources := Nil) + +} diff --git a/project/plugins.sbt b/project/plugins.sbt index 7a95a915a76..351c52084c6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -29,5 +29,3 @@ libraryDependencies ++= Seq( concurrentRestrictions in Global := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 ) - -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") From f4546a640c0f0279c1b503ab5801c37939073d97 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Oct 2018 11:46:26 +1000 Subject: [PATCH 11/66] Revert code inadvertently backported from scala/scala#5952 --- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 1a2c6019061..07ceccb83ca 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -193,17 +193,6 @@ abstract class ClassfileParser { if (!(isScala || isScalaRaw)) loaders.platform.classFileInfoParsed(file, clazz, ClassBytes(ByteBuffer.wrap(in.buf))) } - if (isScalaRaw && !isNothingOrNull) { - unlinkRaw() - } - } - } - - private def unlinkRaw(): Unit = { - val decls = clazz.enclosingPackage.info.decls - for (c <- List(clazz, staticModule, staticModule.moduleClass)) { - c.setInfo(NoType) - decls.unlink(c) } } From bce8beebc20686e7763263bd161c9d1b262e2761 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Viktor=20Klang=20=28=E2=88=9A=29?= Date: Wed, 31 Oct 2018 21:08:07 +1000 Subject: [PATCH 12/66] Update src/compiler/scala/tools/nsc/GenericRunnerSettings.scala Co-Authored-By: retronym --- src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index d7e379b58eb..fcc829b2e64 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -19,7 +19,7 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { lazy val classpathURLs: Seq[URL] = { val registry = new CloseableRegistry try { - new PathResolver(this, new CloseableRegistry).resultAsURLs + new PathResolver(this, registry).resultAsURLs } finally { registry.close() } From 5ed8fd0dc1a27c1ed1e429dca868c49c9570d991 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Nov 2018 10:05:02 +1000 Subject: [PATCH 13/66] Fix crasher regression with implicit classes and default params Since the changes to make the compiler output deterministic, default getter symbols must be entered eagerly before the trees are created. This happens in `enterDefDef`, but that method is bypassed when entering the synthetic symbol for an implicit class factory method. This commit enters the default getter symbols in this case, as well, avoiding a later crash. --- .../tools/nsc/typechecker/MethodSynthesis.scala | 7 +++++++ ...icit-class-implicit-param-with-default.check | 5 +++++ ...icit-class-implicit-param-with-default.scala | 17 +++++++++++++++++ 3 files changed, 29 insertions(+) create mode 100644 test/files/run/implicit-class-implicit-param-with-default.check create mode 100644 test/files/run/implicit-class-implicit-param-with-default.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 20535e89f41..898fce90cef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -231,6 +231,13 @@ trait MethodSynthesis { val methDef = factoryMeth(classDef.mods & AccessFlags | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef) val methSym = enterInScope(assignMemberSymbol(methDef)) context.unit.synthetics(methSym) = methDef + + treeInfo.firstConstructor(classDef.impl.body) match { + case primaryConstructor: DefDef => + if (mexists(primaryConstructor.vparamss)(_.mods.hasDefault)) + enterDefaultGetters(methSym, primaryConstructor, primaryConstructor.vparamss, primaryConstructor.tparams) + case _ => + } methSym setInfo implicitFactoryMethodCompleter(methDef, classDef.symbol) } diff --git a/test/files/run/implicit-class-implicit-param-with-default.check b/test/files/run/implicit-class-implicit-param-with-default.check new file mode 100644 index 00000000000..f0ab6fd76b8 --- /dev/null +++ b/test/files/run/implicit-class-implicit-param-with-default.check @@ -0,0 +1,5 @@ +default +default +default +explicit +explicit diff --git a/test/files/run/implicit-class-implicit-param-with-default.scala b/test/files/run/implicit-class-implicit-param-with-default.scala new file mode 100644 index 00000000000..41bebbc8960 --- /dev/null +++ b/test/files/run/implicit-class-implicit-param-with-default.scala @@ -0,0 +1,17 @@ +object Test { + implicit class C(self: String)(implicit val foo: String = "default") { + def test = foo + } + + implicit class WorkaroundOk(self: String)(implicit val foo: String) { + def this(self: String, dummy: AnyRef = null) { this(self)("")} + } + + def main(args: Array[String]) { + println("".foo) + println(C("").foo) + println(new C("").foo) + println(C("")("explicit").foo) + println(new 
C("")("explicit").foo) + } +} From 5779bbd18a8f9c27f0d62c9618fba9c491487263 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 5 Nov 2018 15:15:28 +1000 Subject: [PATCH 14/66] Small improvements to -Xprint-args - Don't print `-Xprint-args ...` itself - Render a programattically added single output directory as -d. Zinc sets the output directory in this way. ``` scala> import scala.tools.nsc._; val g = new Global(new Settings); g.settings.outputDirs.setSingleOutput("/tmp"); g.settings.printArgs.value = "-"; val run = new g.Run(); run.compileSources(g.newSourceFile("") :: Nil) -d /tmp import scala.tools.nsc._ ``` --- src/compiler/scala/tools/nsc/Global.scala | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index fc3ea84a05e..1a4be86239a 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1446,7 +1446,20 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private def printArgs(sources: List[SourceFile]): Unit = { if (settings.printArgs.isSetByUser) { - val argsFile = (settings.recreateArgs ::: sources.map(_.file.absolute.toString())).mkString("", "\n", "\n") + val singleOuputDir: List[String] = if (settings.d.value == settings.d.default) { + settings.outputDirs.getSingleOutput match { + case Some(file) => + val jfile = file.file + if (jfile != null && !java.nio.file.Files.isSameFile(jfile.toPath, java.nio.file.Paths.get(settings.d.value))) { + // A build tool must have used `settings.outDirs.setSingleOutput`, bypassing `-d`. + // Render that to the equivalent -d arguments. + "-d" :: jfile.toString :: Nil + } else Nil + case _ => Nil + } + } else Nil + val recreated = settings.userSetSettings.toList.filterNot(_ eq settings.printArgs).flatMap(_.unparse) + val argsFile = (recreated ::: singleOuputDir ::: sources.map(_.file.absolute.toString())).mkString("", "\n", "\n") settings.printArgs.value match { case "-" => reporter.echo(argsFile) From 68a096aeee00caf93e87b48ebcb9a9a028a95c50 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Jan 2018 23:05:54 +1000 Subject: [PATCH 15/66] Fix non-termination with java strictfp Also test that the Java parser doesn't force entry of new symbols when it parses modifiers that it translates into symbol annotations. 
Regressed in #7356 --- .../scala/tools/nsc/javac/JavaParsers.scala | 1 + test/files/jvm/strictfp/StrictFpJava.java | 5 +++++ .../files/presentation/parse-invariants.check | 7 +++++++ .../presentation/parse-invariants/Test.scala | 19 ++++++++++++------- .../parse-invariants/src/a/A.java | 16 ++++++++++++++++ 5 files changed, 41 insertions(+), 7 deletions(-) create mode 100644 test/files/jvm/strictfp/StrictFpJava.java create mode 100644 test/files/presentation/parse-invariants/src/a/A.java diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 08468cb505b..d87fa7e8da8 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -395,6 +395,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { in.nextToken() case STRICTFP => addAnnot(ScalaStrictFPAttr) + in.nextToken() case SYNCHRONIZED => in.nextToken() case _ => diff --git a/test/files/jvm/strictfp/StrictFpJava.java b/test/files/jvm/strictfp/StrictFpJava.java new file mode 100644 index 00000000000..89e4e94ae52 --- /dev/null +++ b/test/files/jvm/strictfp/StrictFpJava.java @@ -0,0 +1,5 @@ +strictfp class StrictFpJava {} + +class StrictFpJavaMethod { + strictfp void test() {} +} diff --git a/test/files/presentation/parse-invariants.check b/test/files/presentation/parse-invariants.check index 32e9c846ab5..961bc6df793 100644 --- a/test/files/presentation/parse-invariants.check +++ b/test/files/presentation/parse-invariants.check @@ -1,3 +1,10 @@ +parseTree +NoNewSymbolsEntered OK +Unique OK +Unattributed OK +NeverModify OK +AlwaysParseTree OK +parseTree NoNewSymbolsEntered OK Unique OK Unattributed OK diff --git a/test/files/presentation/parse-invariants/Test.scala b/test/files/presentation/parse-invariants/Test.scala index 128896ccaae..c16045ce912 100644 --- a/test/files/presentation/parse-invariants/Test.scala +++ b/test/files/presentation/parse-invariants/Test.scala @@ -5,12 +5,16 @@ import scala.tools.nsc.interactive.Response object Test extends InteractiveTest { override def execute(): Unit = { - val sf = sourceFiles.find(_.file.name == "A.scala").head - noNewSymbols(sf) - uniqueParseTree(sf) - unattributedParseTree(sf) - neverModifyParseTree(sf) - shouldAlwaysReturnParseTree(sf) + def test(fileName: String): Unit = { + val sf = sourceFiles.find(_.file.name == fileName).head + noNewSymbols(sf) + uniqueParseTree(sf) + unattributedParseTree(sf) + neverModifyParseTree(sf) + shouldAlwaysReturnParseTree(sf) + } + test("A.scala") + test("A.java") } /** @@ -19,12 +23,13 @@ object Test extends InteractiveTest { private def noNewSymbols(sf: SourceFile) { def nextId() = compiler.NoSymbol.newTermSymbol(compiler.TermName("dummy"), compiler.NoPosition, compiler.NoFlags).id val id = nextId() + println("parseTree") val tree = compiler.parseTree(sf) val id2 = nextId() if (id2 == id + 1) { reporter.println("NoNewSymbolsEntered OK") } else { - reporter.println("NoNewSymbolsEntered FAILED") + reporter.println("NoNewSymbolsEntered FAILED. 
") } } diff --git a/test/files/presentation/parse-invariants/src/a/A.java b/test/files/presentation/parse-invariants/src/a/A.java new file mode 100644 index 00000000000..a0447814910 --- /dev/null +++ b/test/files/presentation/parse-invariants/src/a/A.java @@ -0,0 +1,16 @@ +package syntax; + +class A { + transient volatile int x; + strictfp void test() { + } + + native void nativeMethod() + + synchronized void syncMethod() {} + + void thrower() throws Throwable {} + +} + +strictfp class B {} \ No newline at end of file From 520f9a5509a52af7595f8a0f84fb1d8029c39541 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 19 Nov 2018 12:45:07 +1000 Subject: [PATCH 16/66] Support straight-to-JAR, () => ByteBuffer --- .../scala/tools/nsc/PipelineMain.scala | 18 ++++++++++++++---- .../scala/tools/nsc/backend/Platform.scala | 4 ++-- .../nsc/symtab/classfile/ClassfileParser.scala | 8 ++++---- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 5d211323504..c1fd402e7d2 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -18,12 +18,13 @@ import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos import scala.reflect.io.{VirtualDirectory, VirtualFile} import scala.tools.nsc.backend.{ClassfileInfo, JavaPlatform, ScalaClass, ScalaRawClass} -import scala.tools.nsc.classpath.{DirectoryClassPath, VirtualDirectoryClassPath} +import scala.tools.nsc.classpath.{DirectoryClassPath, VirtualDirectoryClassPath, ZipArchiveFileLookup} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success} import scala.concurrent.duration.Duration +import scala.tools.nsc.classpath.ZipAndJarClassPathFactory.ZipArchiveClassPath class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy) { /** Forward errors to the (current) reporter. 
*/ @@ -59,7 +60,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val base = packageDir(symbol.owner) if (symbol.isClass) { val primary = base.fileNamed(symbol.encodedName + ".class") - classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) if (symbol.companionModule.exists) { val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) @@ -67,12 +68,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else if (symbol.isModule) { if (symbol.companionClass.exists) { val primary = base.fileNamed(symbol.encodedName + ".class") - classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) } else { val primary = base.fileNamed(symbol.encodedName + "$.class") - classInfo(primary) = ScalaClass(symbol.fullNameString, ByteBuffer.wrap(pickle.bytes)) + classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) } } } @@ -394,6 +395,15 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val replacements = mutable.Buffer[PickleClassPath[_]]() override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { classPath.flatMap { + case zcp: ZipArchiveFileLookup[_] => + val path = zcp.zipFile.toPath.toRealPath().normalize() + allPickleData.get(path) match { + case null => + zcp :: Nil + case pcp => + replacements += pcp + pcp.classpath :: zcp :: Nil // leaving the original classpath for Java compiled files for now + } case dcp: DirectoryClassPath => val path = dcp.dir.toPath.toRealPath().normalize() allPickleData.get(path) match { diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index 13d61d3cc39..e0d4d2c59d0 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -97,7 +97,7 @@ trait Platform { } sealed abstract class ClassfileInfo {} -final case class ClassBytes(data: ByteBuffer) extends ClassfileInfo +final case class ClassBytes(data: () => ByteBuffer) extends ClassfileInfo final case class ScalaRawClass(className: String) extends ClassfileInfo -final case class ScalaClass(className: String, pickle: ByteBuffer) extends ClassfileInfo +final case class ScalaClass(className: String, pickle: () => ByteBuffer) extends ClassfileInfo diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 07ceccb83ca..407073b966f 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -166,7 +166,7 @@ abstract class ClassfileParser { isScalaRaw = true currentClass = TermName(className) case ScalaClass(className, pickle) => - val pickle1 = pickle + val pickle1 = pickle() isScala = true currentClass = TermName(className) if (pickle1.hasArray) { @@ -177,7 +177,7 @@ abstract class ClassfileParser { unpickler.unpickle(array, 0, clazz, staticModule, file.name) } case 
ClassBytes(data) => - val data1 = data.duplicate() + val data1 = data() val array = new Array[Byte](data1.remaining) data1.get(array) this.in = new AbstractFileReader(file, array) @@ -191,7 +191,7 @@ abstract class ClassfileParser { this.pool = newConstantPool parseClass() if (!(isScala || isScalaRaw)) - loaders.platform.classFileInfoParsed(file, clazz, ClassBytes(ByteBuffer.wrap(in.buf))) + loaders.platform.classFileInfoParsed(file, clazz, ClassBytes(() => ByteBuffer.wrap(in.buf))) } } } @@ -933,7 +933,7 @@ abstract class ClassfileParser { val bytes = san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name) - loaders.platform.classFileInfoParsed(file, clazz, ScalaClass(this.currentClass.toString, ByteBuffer.wrap(bytes))) + loaders.platform.classFileInfoParsed(file, clazz, ScalaClass(this.currentClass.toString, () => ByteBuffer.wrap(bytes))) case None => throw new RuntimeException("Scala class file does not contain Scala annotation") } From faf347de363c19fb6e21ae5bfbe777536d37f23e Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 26 Nov 2018 20:27:33 +0000 Subject: [PATCH 17/66] fix compile --- test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala b/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala index 514294e8e8c..346c58d9ca6 100644 --- a/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala +++ b/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala @@ -26,7 +26,7 @@ class ClassPluginTest extends BytecodeTesting { // ... and this one to read them with a ClassPathPlugin object symbolTable extends SymbolTableForUnitTesting { val fakeClasses = Map( - "fake.C" -> ScalaClass("fake.C", pickleOf("package fake; class C { def foo = 42 }")) + "fake.C" -> ScalaClass("fake.C", () => pickleOf("package fake; class C { def foo = 42 }")) ) private val fakes = new VirtualDirectory("fakes", None) fakes.subdirectoryNamed("fake").fileNamed("C.class") From acf26b0d9d3455ddb0e2ead7c1f40df99fe5841e Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 26 Nov 2018 23:08:45 +0000 Subject: [PATCH 18/66] avoid a few cycles --- src/compiler/scala/tools/nsc/CompilationUnits.scala | 2 +- src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 159021bdaca..af9bbc75dd2 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -153,7 +153,7 @@ trait CompilationUnits { global: Global => final def comment(pos: Position, msg: String): Unit = {} /** Is this about a .java source file? */ - lazy val isJava = source.file.name.endsWith(".java") + val isJava = source.file.name.endsWith(".java") override def toString() = source.toString() } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 947b95f57ba..47014f76f98 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -213,7 +213,7 @@ abstract class Pickler extends SubComponent { // initially, but seems not to work, as the bug shows). 
// Adding the LOCAL_CHILD is necessary to retain exhaustivity warnings under separate // compilation. See test neg/aladdin1055. - val parents = (if (sym.isTrait) List(definitions.ObjectTpe) else Nil) ::: List(sym.tpe) + val parents = if (sym.isTrait) List(definitions.ObjectTpe, sym.tpe) else List(sym.tpe) globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, parents, EmptyScope, pos = sym.pos) } From 6c361850b913aad403a7d0668d8b7806f163a9ff Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 26 Nov 2018 23:09:16 +0000 Subject: [PATCH 19/66] typo --- src/compiler/scala/tools/nsc/backend/Platform.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index e0d4d2c59d0..8975321d001 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -57,7 +57,7 @@ trait Platform { * - Caching the ScalaSignature annotation contents, to avoid the cost of decompressing * and parsing the classfile, akin to the OpenJDK's .sig format for stripped class files. * - Starting a downstream compilation job immediately after the upstream job has completed - * the pickler phase ("Build Pipelineing") + * the pickler phase ("Build Pipelining") */ abstract class ClassPathPlugin { def info(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] From 8c02036c7c284a54469869a122da0777e6449fba Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 26 Nov 2018 23:09:44 +0000 Subject: [PATCH 20/66] pickle java --- src/compiler/scala/tools/nsc/Global.scala | 12 +++++++++--- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 ++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 1a4be86239a..2c265c52227 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -405,12 +405,18 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def apply(unit: CompilationUnit): Unit + // run only the phases needed + private[this] val runThisPhaseForJava: Boolean = shouldRunThisPhaseForJava + + protected def shouldRunThisPhaseForJava: Boolean = { + this.id > (if (createJavadoc) currentRun.typerPhase.id + else currentRun.namerPhase.id) + } + /** Is current phase cancelled on this unit? */ def cancelled(unit: CompilationUnit) = { - // run the typer only if in `createJavadoc` mode - val maxJavaPhase = if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id if (Thread.interrupted()) reporter.cancelled = true - reporter.cancelled || unit.isJava && this.id > maxJavaPhase + reporter.cancelled || unit.isJava && runThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 47014f76f98..4cec57aa3e9 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -90,6 +90,8 @@ abstract class Pickler extends SubComponent { throw e } } + + override protected def shouldRunThisPhaseForJava: Boolean = true //from some -Y ?? 
} private class Pickle(root: Symbol) extends PickleBuffer(new Array[Byte](4096), -1, 0) { From 58aaf2ac453cf1c971e403abcca4e10f18b58b79 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 15:40:21 +1000 Subject: [PATCH 21/66] Fix compilation --- src/compiler/scala/tools/nsc/CompilationUnits.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index af9bbc75dd2..46386beb58e 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -21,7 +21,7 @@ trait CompilationUnits { global: Global => /** An object representing a missing compilation unit. */ object NoCompilationUnit extends CompilationUnit(NoSourceFile) { - override lazy val isJava = false + override val isJava = false override def exists = false override def toString() = "NoCompilationUnit" } From 9ef77c5dc1bc012757be24a20e4003e6455e20f0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 15:41:28 +1000 Subject: [PATCH 22/66] Fix NPE --- src/compiler/scala/tools/nsc/Global.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 2c265c52227..8887024fc83 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -406,8 +406,6 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def apply(unit: CompilationUnit): Unit // run only the phases needed - private[this] val runThisPhaseForJava: Boolean = shouldRunThisPhaseForJava - protected def shouldRunThisPhaseForJava: Boolean = { this.id > (if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id) @@ -416,7 +414,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Is current phase cancelled on this unit? 
*/ def cancelled(unit: CompilationUnit) = { if (Thread.interrupted()) reporter.cancelled = true - reporter.cancelled || unit.isJava && runThisPhaseForJava + reporter.cancelled || unit.isJava && shouldRunThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { From ba43e4de2c9f82bf1e93cff65e281441bf416241 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 15:44:16 +1000 Subject: [PATCH 23/66] Remove debug code --- src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 4875ce158a4..2ad68f4d620 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -284,8 +284,6 @@ abstract class SymbolLoaders { val classPathEntries = classPath.list(packageName) - if (root.name.string_==("immutable")) - getClass if (!root.isRoot) for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry) if (!root.isEmptyPackageClass) { From c1a0c9361c921e0a4dc7c7abff3a4cd3ddde4a2c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 15:48:17 +1000 Subject: [PATCH 24/66] Improving pickle handoff - Fix pickle hand off for modules without a companion class - Block javac compilation on current module's scala compilation (same as CompileOrder.Mixed in SBT) --- .../scala/tools/nsc/PipelineMain.scala | 37 +++++++++++++------ 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c1fd402e7d2..56b0579fafb 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -66,15 +66,10 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) } } else if (symbol.isModule) { - if (symbol.companionClass.exists) { val primary = base.fileNamed(symbol.encodedName + ".class") classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) - } else { - val primary = base.fileNamed(symbol.encodedName + "$.class") - classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) - } } } } @@ -175,7 +170,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case Pipeline => val futures: immutable.Seq[Future[Unit]] = projects.map { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) - f1.map { _ => p.fullCompileExportPickles(); p.javaCompile() } + val scalaCompiles: Future[Unit] = f1.map { _ => p.fullCompileExportPickles() } + // Start javac after scalac has completely finished + val f2 = Future.sequence[Unit, List](p.groups.map(_.done.future)) + val javaCompiles: Future[Unit] = f2.map { _ => p.javaCompile() } + scalaCompiles.flatMap(_ => javaCompiles) } val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) Await.result(toWait, Duration.Inf) @@ -231,12 +230,17 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def projectEvents(p: Task): 
List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { - events += durationEvent(p.label, "outline-type", p.outlineTimer) + val desc = if (p.shouldOutlineType) "outline-type" else "parser-to-pickler" + events += durationEvent(p.label, desc, p.outlineTimer) } for ((g, ix) <- p.groups.zipWithIndex) { if (g.timer.durationMicros > 0d) events += durationEvent(p.label, "compile-" + ix, g.timer) } + if (p.javaTimer.durationMicros > 0d) { + val desc = "javac" + events += durationEvent(p.label, desc, p.javaTimer) + } events.result() } projects.iterator.flatMap(projectEvents).addString(sb, ",\n") @@ -271,6 +275,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val isGrouped = groups.size > 1 val outlineTimer = new Timer() + val javaTimer = new Timer() var shouldOutlineType = true var outlineCriticalPathMs = 0d @@ -332,6 +337,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def fullCompileExportPickles(): Unit = { assert(groups.size == 1) val group = groups.head + log("scalac: start") outlineTimer.start() val run2 = new compiler.Run() { override def advancePhase(): Unit = { @@ -340,6 +346,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineTimer.stop() outlineDone.complete(Success(())) group.timer.start() + log("scalac: exported pickles") } super.advancePhase() } @@ -348,6 +355,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy run2 compile group.files compiler.reporter.finish() group.timer.stop() + log("scalac: done") if (compiler.reporter.hasErrors) { group.done.complete(Failure(new RuntimeException("Compile failed"))) } else { @@ -356,18 +364,24 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } def javaCompile(): Unit = { + log("javac: start") val javaSources = files.filter(_.endsWith(".java")) if (javaSources.nonEmpty) { + javaTimer.start() javaDone.completeWith(Future { val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + command.settings.classpath.value) val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) compileTask.setProcessors(Collections.emptyList()) compileTask.call() + javaTimer.stop() + () }) } else { javaDone.complete(Success(())) } + log("javac: start") } + def log(msg: String) = () //Predef.println(this.label + ": " + msg) } final class Timer() { @@ -402,7 +416,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy zcp :: Nil case pcp => replacements += pcp - pcp.classpath :: zcp :: Nil // leaving the original classpath for Java compiled files for now + pcp.classpath :: Nil } case dcp: DirectoryClassPath => val path = dcp.dir.toPath.toRealPath().normalize() @@ -411,7 +425,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dcp :: Nil case pcp => replacements += pcp - pcp.classpath :: dcp :: Nil // leaving the original classpath for Java compiled files for now + pcp.classpath :: Nil } case cp => cp :: Nil } @@ -419,7 +433,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { file match { - case vf: VirtualFile => + case vf: VirtualFile if vf.getClass == classOf[VirtualFile] => val iterator = 
replacements.iterator.flatMap(_.classInfo.get(vf)) if (iterator.hasNext) return Some(iterator.next()) @@ -452,8 +466,7 @@ case object Traditional extends BuildStrategy object PipelineMain { def main(args: Array[String]): Unit = { var i = 0 - //for (_ <- 1 to 10; n <- List(parallel.availableProcessors, 1); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { - for (_ <- 1 to 20; n <- List(parallel.availableProcessors); strat <- List(OutlineTypeOnly)) { + for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { i += 1 val main = new PipelineMainClass(i.toString, n, strat) println(s"====== ITERATION $i=======") From 4abf85b108e58df31a436a3112123fb80ebc4849 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 16:19:27 +1000 Subject: [PATCH 25/66] Move testing code into its own Main --- src/compiler/scala/tools/nsc/PipelineMain.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 56b0579fafb..22feed5d9ba 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -464,6 +464,17 @@ case object Pipeline extends BuildStrategy case object Traditional extends BuildStrategy object PipelineMain { + def main(args: Array[String]): Unit = { + val main = new PipelineMainClass("1", parallel.availableProcessors, Pipeline) + val result = main.process(args) + if (!result) + System.exit(1) + else + System.exit(0) + } +} + +object PipelineMainTest { def main(args: Array[String]): Unit = { var i = 0 for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { From fe91627c5136212a6710637b1c4821c3efa3ff32 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 16:38:07 +1000 Subject: [PATCH 26/66] Remove strategy just used for performance testing. 
--- .../scala/tools/nsc/PipelineMain.scala | 49 ++++++------------- 1 file changed, 16 insertions(+), 33 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 22feed5d9ba..ac8318166ba 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -89,8 +89,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val projects: List[Task] = args.toList.map(commandFor) val produces = mutable.HashMap[Path, Task]() for (p <- projects) { - val outputDir = p.command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() - produces(outputDir) = p + produces(p.outputDir) = p } val dependsOn = mutable.HashMap[Task, List[Task]]() for (p <- projects) { @@ -101,37 +100,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val timer = new Timer timer.start() strategy match { - case OutlineTypeOnly => - val futures = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) - p.shouldOutlineType = true - f1.map { _ => - p.outlineCompile() - p.javaCompile() - } - } - - val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.outlineDone.future :: Nil) )) - Await.result(toWait, Duration.Inf) - timer.stop() - - for (p <- projects) { - val dependencies = dependsOn(p) - def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max - val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) - p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs - p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) - p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum - } - - if (parallelism == 1) { - val criticalPath = projects.maxBy(_.regularCriticalPathMs) - println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") - } else - println(f" Wall Clock: ${timer.durationMs}%.0f ms") case OutlineTypePipeline => val futures = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map { task => + if (p.macroClassPathSet.contains(task.outputDir)) + task.javaDone.future + else + task.outlineDone.future + }) val shouldOutlineType = dependedOn(p) p.shouldOutlineType = shouldOutlineType f1.map { _ => @@ -169,7 +145,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => val futures: immutable.Seq[Future[Unit]] = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.outlineDone.future)) + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(task => { + if (p.macroClassPathSet.contains(task.outputDir)) + task.javaDone.future + else + task.outlineDone.future + })) val scalaCompiles: Future[Unit] = f1.map { _ => p.fullCompileExportPickles() } // Start javac after scalac has completely finished val f2 = Future.sequence[Unit, List](p.groups.map(_.done.future)) @@ -256,6 +237,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy private case class Task(argsFile: String, command: CompilerCommand, files: List[String]) { val label = argsFile.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") override def toString: String = argsFile + def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() + def macroClassPath: Seq[Path] = ClassPath.expandPath(command.settings.YmacroClasspath.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) + def macroClassPathSet: Set[Path] = macroClassPath.toSet command.settings.YcacheMacroClassLoader.value = "none" @@ -456,7 +440,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } sealed abstract class BuildStrategy -case object OutlineTypeOnly extends BuildStrategy /** Outline type check to compute type signatures as pickles as an input to downstream compilation. 
*/ case object OutlineTypePipeline extends BuildStrategy case object Pipeline extends BuildStrategy From fbc519cbaf7f91307a9d75b8ff1360323b7356c1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Nov 2018 17:03:00 +1000 Subject: [PATCH 27/66] Refactor --- .../scala/tools/nsc/PipelineMain.scala | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index ac8318166ba..a4ab921a274 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -391,26 +391,19 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { val replacements = mutable.Buffer[PickleClassPath[_]]() + def replaceInternalClassPath(cp: ClassPath, underlying: Path): List[ClassPath] = { + allPickleData.get(underlying.toRealPath().normalize()) match { + case null => + cp :: Nil + case pcp => + replacements += pcp + pcp.classpath :: Nil + } + } override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { classPath.flatMap { - case zcp: ZipArchiveFileLookup[_] => - val path = zcp.zipFile.toPath.toRealPath().normalize() - allPickleData.get(path) match { - case null => - zcp :: Nil - case pcp => - replacements += pcp - pcp.classpath :: Nil - } - case dcp: DirectoryClassPath => - val path = dcp.dir.toPath.toRealPath().normalize() - allPickleData.get(path) match { - case null => - dcp :: Nil - case pcp => - replacements += pcp - pcp.classpath :: Nil - } + case zcp: ZipArchiveFileLookup[_] => replaceInternalClassPath(zcp, zcp.zipFile.toPath) + case dcp: DirectoryClassPath => replaceInternalClassPath(dcp, dcp.dir.toPath) + case cp => cp :: Nil } } From 2e1cbe91665e23b99723555f653604dda9d47d6d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 30 Nov 2018 13:37:50 +1000 Subject: [PATCH 28/66] Add hard dependency on .class files for plugins --- src/compiler/scala/tools/nsc/PipelineMain.scala | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index a4ab921a274..67e8af4865c 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -103,7 +103,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case OutlineTypePipeline => val futures = projects.map { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map { task => - if (p.macroClassPathSet.contains(task.outputDir)) + if (p.macroClassPathSet.contains(task.outputDir) || p.pluginClassPath.contains(task.outputDir)) task.javaDone.future else task.outlineDone.future @@ -146,7 +146,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case Pipeline => val futures: immutable.Seq[Future[Unit]] = projects.map { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(task => { - if (p.macroClassPathSet.contains(task.outputDir)) + if (p.macroClassPathSet.contains(task.outputDir) || p.pluginClassPath.contains(task.outputDir)) task.javaDone.future else task.outlineDone.future @@ -240,6 +240,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() def macroClassPath: Seq[Path] =
ClassPath.expandPath(command.settings.YmacroClasspath.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) def macroClassPathSet: Set[Path] = macroClassPath.toSet + def pluginClassPath: Set[Path] = { + def asPath(p: String) = ClassPath split p + val paths = command.settings.plugin.value filter (_ != "") flatMap (s => asPath(s) map (s => Paths.get(s))) + paths.toSet + } command.settings.YcacheMacroClassLoader.value = "none" From 4b6f14b38c87ab817cb1a64c9dcdcf7252bddbd6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 30 Nov 2018 16:57:55 +1000 Subject: [PATCH 29/66] Refactor, fix plugin handling --- .../scala/tools/nsc/PipelineMain.scala | 41 +++++++++++-------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 67e8af4865c..e83bee7f065 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -93,8 +93,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } val dependsOn = mutable.HashMap[Task, List[Task]]() for (p <- projects) { - val value: Seq[String] = ClassPath.expandPath(p.command.settings.classpath.value, expandStar = true) - dependsOn(p) = value.flatMap(s => produces.get(Paths.get(s).toAbsolutePath.normalize())).toList.filterNot(_ == p) + dependsOn(p) = (p.classPath ++ p.pluginClassPath).flatMap(s => produces.get(s)).toList.filterNot(_ == p) } val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.toSet val timer = new Timer @@ -102,12 +101,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy strategy match { case OutlineTypePipeline => val futures = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map { task => - if (p.macroClassPathSet.contains(task.outputDir) || p.pluginClassPath.contains(task.outputDir)) - task.javaDone.future - else - task.outlineDone.future - }) + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) val shouldOutlineType = dependedOn(p) p.shouldOutlineType = shouldOutlineType f1.map { _ => @@ -145,12 +139,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => val futures: immutable.Seq[Future[Unit]] = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(task => { - if (p.macroClassPathSet.contains(task.outputDir) || p.pluginClassPath.contains(task.outputDir)) - task.javaDone.future - else - task.outlineDone.future - })) + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) val scalaCompiles: Future[Unit] = f1.map { _ => p.fullCompileExportPickles() } // Start javac after scalac has completely finished val f2 = Future.sequence[Unit, List](p.groups.map(_.done.future)) @@ -238,13 +227,26 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val label = argsFile.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") override def toString: String = argsFile def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() - def macroClassPath: Seq[Path] = ClassPath.expandPath(command.settings.YmacroClasspath.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) + private def expand(s: 
command.settings.PathSetting): List[Path] = { + ClassPath.expandPath(s.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) + } + def classPath: Seq[Path] = expand(command.settings.classpath) + def macroClassPath: Seq[Path] = expand(command.settings.YmacroClasspath) def macroClassPathSet: Set[Path] = macroClassPath.toSet def pluginClassPath: Set[Path] = { def asPath(p: String) = ClassPath split p val paths = command.settings.plugin.value filter (_ != "") flatMap (s => asPath(s) map (s => Paths.get(s))) paths.toSet } + def dependencyReadyFuture(dependency: Task) = if (macroClassPathSet.contains(dependency.outputDir)) { + log(s"dependency is on macro classpath, will wait for .class files: ${dependency.label}") + dependency.javaDone.future + } else if (pluginClassPath.contains(dependency.outputDir)) { + log(s"dependency is on plugin classpath, will wait for .class files: ${dependency.label}") + dependency.javaDone.future + } else + dependency.outlineDone.future + command.settings.YcacheMacroClassLoader.value = "none" @@ -370,7 +372,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } log("javac: start") } - def log(msg: String) = () //Predef.println(this.label + ": " + msg) + def log(msg: String) = Predef.println(this.label + ": " + msg) } final class Timer() { @@ -462,7 +464,12 @@ object PipelineMainTest { i += 1 val main = new PipelineMainClass(i.toString, n, strat) println(s"====== ITERATION $i=======") - val result = main.process(args) + val result = main.process(Array( + "/code/boxer/macros/target/compile.args", + "/code/boxer/plugin/target/compile.args", + "/code/boxer/support/target/compile.args", + "/code/boxer/use-macro/target/compile.args", + "/code/boxer/use-plugin/target/compile.args")) if (!result) System.exit(1) } From 99ddf46cbdb780e55a88cc50e639c98d04d0ae52 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 30 Nov 2018 17:06:18 +1000 Subject: [PATCH 30/66] Fixup java pickle support --- src/compiler/scala/tools/nsc/Global.scala | 4 ++-- src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 8887024fc83..efcfd8d2a35 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -406,7 +406,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def apply(unit: CompilationUnit): Unit // run only the phases needed - protected def shouldRunThisPhaseForJava: Boolean = { + protected def shouldSkipThisPhaseForJava: Boolean = { this.id > (if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id) } @@ -414,7 +414,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Is current phase cancelled on this unit? 
*/ def cancelled(unit: CompilationUnit) = { if (Thread.interrupted()) reporter.cancelled = true - reporter.cancelled || unit.isJava && shouldRunThisPhaseForJava + reporter.cancelled || unit.isJava && shouldSkipThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 4cec57aa3e9..84b97ef4dbb 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -41,6 +41,8 @@ abstract class Pickler extends SubComponent { class PicklePhase(prev: Phase) extends StdPhase(prev) { def apply(unit: CompilationUnit): Unit = { + if (unit.isJava) + getClass def pickle(tree: Tree): Unit = { tree match { case PackageDef(_, stats) => @@ -91,7 +93,7 @@ abstract class Pickler extends SubComponent { } } - override protected def shouldRunThisPhaseForJava: Boolean = true //from some -Y ?? + override protected def shouldSkipThisPhaseForJava: Boolean = false //from some -Y ?? } private class Pickle(root: Symbol) extends PickleBuffer(new Array[Byte](4096), -1, 0) { From 4a5d95111263ffd242d74e017e52353ae8d34a24 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 30 Nov 2018 17:06:35 +1000 Subject: [PATCH 31/66] Fixup java pickle support --- src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 84b97ef4dbb..fa0bb189892 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -41,8 +41,6 @@ abstract class Pickler extends SubComponent { class PicklePhase(prev: Phase) extends StdPhase(prev) { def apply(unit: CompilationUnit): Unit = { - if (unit.isJava) - getClass def pickle(tree: Tree): Unit = { tree match { case PackageDef(_, stats) => From 385c1fb10804e61b90a21b4415e3c088906863f4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 3 Dec 2018 10:52:49 +1000 Subject: [PATCH 32/66] Better error handling --- src/compiler/scala/tools/nsc/PipelineMain.scala | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index e83bee7f065..eb6a4d80243 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -5,6 +5,7 @@ package scala.tools.nsc import java.io.File +import java.lang.Thread.UncaughtExceptionHandler import java.nio.ByteBuffer import java.nio.file.{Files, Path, Paths} import java.util.Collections @@ -33,8 +34,13 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } private var reporter: Reporter = _ - - implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism)) + private object handler extends UncaughtExceptionHandler { + override def uncaughtException(t: Thread, e: Throwable): Unit = { + e.printStackTrace() + System.exit(-1) + } + } + implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) private class PickleClassPath[G <: Global](data: mutable.AnyRefMap[G#Symbol, 
PickleBuffer]) { From 18195694fe79cc256616ce93e6d8fdba7e8a2fc7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 3 Dec 2018 11:39:14 +1000 Subject: [PATCH 33/66] Refactor --- .../scala/tools/nsc/PipelineMain.scala | 78 +++++++++++-------- 1 file changed, 45 insertions(+), 33 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index eb6a4d80243..e22c48a3804 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -106,27 +106,38 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy timer.start() strategy match { case OutlineTypePipeline => - val futures = projects.map { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) - val shouldOutlineType = dependedOn(p) - p.shouldOutlineType = shouldOutlineType - f1.map { _ => - if (p.shouldOutlineType) { - p.outlineCompile() - } else { - p.fullCompile() + projects.foreach {p => + val isLeaf = !dependedOn.contains(p) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) + if (isLeaf) { + for { + _ <- depsReady + _ <- { + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() } - } - } - projects.map { - p => - if (p.shouldOutlineType) p.outlineDone.future.onComplete { _ => - p.fullCompile() + } else { + for { + _ <- depsReady + _ <- { + p.outlineCompile() + p.outlineDone.future + } + _ <- { + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() } - Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + + } } - val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) - Await.result(toWait, Duration.Inf) + + Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() for (p <- projects) { @@ -144,7 +155,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => - val futures: immutable.Seq[Future[Unit]] = projects.map { p => + projects.foreach { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) val scalaCompiles: Future[Unit] = f1.map { _ => p.fullCompileExportPickles() } // Start javac after scalac has completely finished @@ -152,8 +163,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val javaCompiles: Future[Unit] = f2.map { _ => p.javaCompile() } scalaCompiles.flatMap(_ => javaCompiles) } - val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) - Await.result(toWait, Duration.Inf) + Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() for (p <- projects) { @@ -171,17 +181,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => - val futures = projects.map { p => + projects.foreach { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.javaDone.future)) - val shouldOutlineType = dependedOn(p) - p.shouldOutlineType = shouldOutlineType f1.flatMap { _ => 
p.fullCompile() Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) } } - val toWait: Future[List[Unit]] = Future.sequence(futures).flatMap(_ => Future.sequence(projects.flatMap(p => p.javaDone.future :: p.groups.map(_.done.future) ))) - Await.result(toWait, Duration.Inf) + Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() for (p <- projects) { @@ -206,7 +213,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def projectEvents(p: Task): List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { - val desc = if (p.shouldOutlineType) "outline-type" else "parser-to-pickler" + val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) } for ((g, ix) <- p.groups.zipWithIndex) { @@ -274,12 +281,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val outlineTimer = new Timer() val javaTimer = new Timer() - var shouldOutlineType = true var outlineCriticalPathMs = 0d var regularCriticalPathMs = 0d var fullCriticalPathMs = 0d - val outlineDone = Promise[Unit]() - val javaDone = Promise[Unit]() + val outlineDone: Promise[Unit] = Promise[Unit]() + val javaDone: Promise[Unit] = Promise[Unit]() + def compilationDone: Future[List[Unit]] = Future.sequence(javaDone.future :: groups.map(_.done.future)) lazy val compiler: Global = { val result = newCompiler(command.settings) @@ -293,6 +300,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def outlineCompile(): Unit = { outlineTimer.start() + log("scalac outline: start") command.settings.Youtline.value = true command.settings.stopAfter.value = List("pickler") command.settings.Ymacroexpand.value = command.settings.MacroExpand.None @@ -305,6 +313,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineDone.complete(Failure(new RuntimeException("compile failed"))) else outlineDone.complete(Success(())) + log("scalac outline: done") } def fullCompile(): Unit = { @@ -312,9 +321,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy command.settings.stopAfter.value = Nil command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal - for (group <- groups) { + val groupCount = groups.size + for ((group, ix) <- groups.zipWithIndex) { group.done.completeWith { Future { + log(s"scalac (${ix + 1}/$groupCount): start") val compiler2 = newCompiler(command.settings) val run2 = new compiler2.Run() group.timer.start() @@ -326,6 +337,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else { group.done.complete(Success(())) } + log(s"scalac (${ix + 1}/$groupCount): done") } } } @@ -376,9 +388,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else { javaDone.complete(Success(())) } - log("javac: start") + log("javac: done") } - def log(msg: String) = Predef.println(this.label + ": " + msg) + def log(msg: String): Unit = Predef.println(this.label + ": " + msg) } final class Timer() { From 8cb0967bd6c733033426d183e30823e3628348e3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 3 Dec 2018 13:10:13 +1000 Subject: [PATCH 34/66] Refactor, expose strategy and parallelism config --- src/compiler/scala/tools/nsc/PipelineMain.scala | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git 
a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index e22c48a3804..dbb514a1c22 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -10,22 +10,20 @@ import java.nio.ByteBuffer import java.nio.file.{Files, Path, Paths} import java.util.Collections -import javax.tools.{SimpleJavaFileObject, ToolProvider} +import javax.tools.ToolProvider -import scala.collection.JavaConverters.asJavaIterableConverter -import scala.collection.{immutable, mutable, parallel} +import scala.collection.{mutable, parallel} +import scala.concurrent.duration.Duration import scala.concurrent.{Await, ExecutionContext, Future, Promise} import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos import scala.reflect.io.{VirtualDirectory, VirtualFile} -import scala.tools.nsc.backend.{ClassfileInfo, JavaPlatform, ScalaClass, ScalaRawClass} +import scala.tools.nsc.backend.{ClassfileInfo, ScalaClass, ScalaRawClass} import scala.tools.nsc.classpath.{DirectoryClassPath, VirtualDirectoryClassPath, ZipArchiveFileLookup} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success} -import scala.concurrent.duration.Duration -import scala.tools.nsc.classpath.ZipAndJarClassPathFactory.ZipArchiveClassPath class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy) { /** Forward errors to the (current) reporter. */ @@ -390,7 +388,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } log("javac: done") } - def log(msg: String): Unit = Predef.println(this.label + ": " + msg) + def log(msg: String): Unit = println(this.label + ": " + msg) } final class Timer() { @@ -466,7 +464,10 @@ case object Traditional extends BuildStrategy object PipelineMain { def main(args: Array[String]): Unit = { - val main = new PipelineMainClass("1", parallel.availableProcessors, Pipeline) + val strategies = List(OutlineTypePipeline, Pipeline, Traditional) + val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get + val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) + val main = new PipelineMainClass("1", parallelism, strategy) val result = main.process(args) if (!result) System.exit(1) From 43bbcc738493993e5fd01475b8744830768c59e2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 10:35:38 +1000 Subject: [PATCH 35/66] Avoid NPE with -Xshow-phases, etc --- .../scala/tools/nsc/PipelineMain.scala | 74 +++++++++++++++++-- 1 file changed, 67 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index dbb514a1c22..4da71782ed3 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -7,8 +7,10 @@ package scala.tools.nsc import java.io.File import java.lang.Thread.UncaughtExceptionHandler import java.nio.ByteBuffer +import java.nio.channels.Channels import java.nio.file.{Files, Path, Paths} import java.util.Collections +import java.util.concurrent.locks.{Lock, ReadWriteLock} import javax.tools.ToolProvider @@ -26,6 +28,11 @@ import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success} class PipelineMainClass(label: String, parallelism: Int, strategy: 
BuildStrategy) { + private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") + private val pickleCache = { + new PickleCache(if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured)) + } + /** Forward errors to the (current) reporter. */ protected def scalacError(msg: String): Unit = { reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") @@ -78,7 +85,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, PickleClassPath[_]] - private val allParsedInfos = new java.util.concurrent.ConcurrentHashMap[AbstractFile, ClassfileInfo] def process(args: Array[String]): Boolean = { println(s"parallelism = $parallelism, strategy = $strategy") @@ -227,6 +233,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") Files.write(Paths.get(s"build-${label}.trace"), trace.toString.getBytes()) + if (pickleCacheConfigured == null) { + AbstractFile.getDirectory(pickleCache.dir.toFile).delete() + } true } @@ -292,7 +301,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (reporter.hasErrors) reporter.flush() else if (command.shouldStopWithInfo) - reporter.echo(command.getInfoMessage(compiler)) + reporter.echo(command.getInfoMessage(result)) result } @@ -440,13 +449,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy else None case _ => None } - allParsedInfos.get(file) match { - case null => None - case info => Some(info) - } + + pickleCache.get(file) } override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { - allParsedInfos.put(file, info) + pickleCache.put(file, info) } } g.platform.addClassPathPlugin(plugin) @@ -495,3 +502,56 @@ object PipelineMainTest { System.exit(0) } } + +class PickleCache(val dir: Path) { + + private val PicklePattern = """(.*)\.pickle""".r + private val RawPattern = """(.*)\.raw""".r + def get(file: AbstractFile): Option[ClassfileInfo] = synchronized { + if (file.toString().contains("Plugin.class")) + getClass + file.underlyingSource match { + case Some(jar) => + val cachePath = dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) + if (Files.exists(cachePath)) { + val it = Files.list(cachePath).iterator() + if (it.hasNext) { + val f = it.next() + val name = f.getFileName + name.toString match { + case PicklePattern(className) => + val bytes = Files.readAllBytes(f) + Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) + case RawPattern(className) => + val bytes = Files.readAllBytes(f) + Some(backend.ScalaRawClass(className)) + case _ => None + } + } else None + } else None + case None => + None + } + } + def put(file: AbstractFile, info: ClassfileInfo): Unit = { + if (file.toString().contains("Plugin.class")) + getClass + + file.underlyingSource match { + case Some(jar) => + val cachePath = dir.resolve("." 
+ jar.file.toPath).normalize().resolve(file.path) + info match { + case ScalaClass(className, pickle) => + Files.createDirectories(cachePath) + val ch = Channels.newChannel(Files.newOutputStream(cachePath.resolve(className + ".pickle"))) + try ch.write(pickle()) + finally ch.close() + case ScalaRawClass(className) => + Files.createDirectories(cachePath) + Files.write(cachePath.resolve(className + ".raw"), Array[Byte]()) + case _ => + } + case None => + } + } +} \ No newline at end of file From c3a73dae5c1027d7b0375e5e84e8de4907876b58 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 11:03:54 +1000 Subject: [PATCH 36/66] Sequence futures in the right order --- src/compiler/scala/tools/nsc/PipelineMain.scala | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 4da71782ed3..ebc30696cf1 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -293,7 +293,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy var fullCriticalPathMs = 0d val outlineDone: Promise[Unit] = Promise[Unit]() val javaDone: Promise[Unit] = Promise[Unit]() - def compilationDone: Future[List[Unit]] = Future.sequence(javaDone.future :: groups.map(_.done.future)) + def compilationDone: Future[List[Unit]] = Future.sequence(outlineDone.future :: (groups.map(_.done.future) :+ javaDone.future)) lazy val compiler: Global = { val result = newCompiler(command.settings) @@ -316,11 +316,13 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy allPickleData.put(command.settings.outputDirs.getSingleOutput.get.file.toPath.toRealPath().normalize(), new PickleClassPath(run1.symData)) outlineTimer.stop() reporter.finish() - if (reporter.hasErrors) + if (reporter.hasErrors) { + log("scalac outline: failed") outlineDone.complete(Failure(new RuntimeException("compile failed"))) - else + } else { + log("scalac outline: done") outlineDone.complete(Success(())) - log("scalac outline: done") + } } def fullCompile(): Unit = { @@ -371,10 +373,13 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy run2 compile group.files compiler.reporter.finish() group.timer.stop() - log("scalac: done") if (compiler.reporter.hasErrors) { + log("scalac: failed") + outlineDone.complete(Failure(new RuntimeException("Compile failed"))) group.done.complete(Failure(new RuntimeException("Compile failed"))) } else { + log("scalac: done") +// outlineDone.complete(Success(())) group.done.complete(Success(())) } } From e547271a4cd6bf9f7a189538cd788560ac074d7b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 11:35:20 +1000 Subject: [PATCH 37/66] Fixups --- .../scala/tools/nsc/PipelineMain.scala | 103 ++++++++++-------- .../scala/reflect/internal/Definitions.scala | 2 +- 2 files changed, 56 insertions(+), 49 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index ebc30696cf1..c2f14d37f93 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -10,13 +10,14 @@ import java.nio.ByteBuffer import java.nio.channels.Channels import java.nio.file.{Files, Path, Paths} import java.util.Collections +import java.util.concurrent.TimeUnit import java.util.concurrent.locks.{Lock, ReadWriteLock} import javax.tools.ToolProvider import 
scala.collection.{mutable, parallel} import scala.concurrent.duration.Duration -import scala.concurrent.{Await, ExecutionContext, Future, Promise} +import scala.concurrent._ import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos import scala.reflect.io.{VirtualDirectory, VirtualFile} @@ -117,6 +118,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy for { _ <- depsReady _ <- { + p.outlineDone.complete(Success(())) p.fullCompile() Future.sequence(p.groups.map(_.done.future)) } @@ -188,6 +190,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.foreach { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.javaDone.future)) f1.flatMap { _ => + p.outlineDone.complete(Success(())) p.fullCompile() Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) } @@ -426,42 +429,44 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy protected def newCompiler(settings: Settings): Global = { val g = Global(settings) - val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { - val replacements = mutable.Buffer[PickleClassPath[_]]() - def replaceInternalClassPath(cp: ClassPath, underlying: Path): List[ClassPath] = { - allPickleData.get(underlying.toRealPath().normalize()) match { - case null => - cp :: Nil - case pcp => - replacements += pcp - pcp.classpath :: Nil + if (strategy != Traditional) { + val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { + val replacements = mutable.Buffer[PickleClassPath[_]]() + def replaceInternalClassPath(cp: ClassPath, underlying: Path): List[ClassPath] = { + allPickleData.get(underlying.toRealPath().normalize()) match { + case null => + cp :: Nil + case pcp => + replacements += pcp + pcp.classpath :: Nil + } } - } - override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { - classPath.flatMap { - case zcp: ZipArchiveFileLookup[_] => replaceInternalClassPath(zcp, zcp.zipFile.toPath) - case dcp: DirectoryClassPath => replaceInternalClassPath(dcp, dcp.dir.toPath) - case cp => cp :: Nil + override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { + classPath.flatMap { + case zcp: ZipArchiveFileLookup[_] => replaceInternalClassPath(zcp, zcp.zipFile.toPath) + case dcp: DirectoryClassPath => replaceInternalClassPath(dcp, dcp.dir.toPath) + case cp => cp :: Nil + } } - } - override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { - file match { - case vf: VirtualFile if vf.getClass == classOf[VirtualFile] => - val iterator = replacements.iterator.flatMap(_.classInfo.get(vf)) - if (iterator.hasNext) - return Some(iterator.next()) - else None - case _ => None - } + override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { + file match { + case vf: VirtualFile if vf.getClass == classOf[VirtualFile] => + val iterator = replacements.iterator.flatMap(_.classInfo.get(vf)) + if (iterator.hasNext) + return Some(iterator.next()) + else None + case _ => None + } - pickleCache.get(file) - } - override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { - pickleCache.put(file, info) + pickleCache.get(file) + } + override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { + pickleCache.put(file, info) + } } + g.platform.addClassPathPlugin(plugin) } - g.platform.addClassPathPlugin(plugin) g } @@ -519,29 +524,31 @@ class 
PickleCache(val dir: Path) { case Some(jar) => val cachePath = dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) if (Files.exists(cachePath)) { - val it = Files.list(cachePath).iterator() - if (it.hasNext) { - val f = it.next() - val name = f.getFileName - name.toString match { - case PicklePattern(className) => - val bytes = Files.readAllBytes(f) - Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) - case RawPattern(className) => - val bytes = Files.readAllBytes(f) - Some(backend.ScalaRawClass(className)) - case _ => None - } - } else None + val listing = Files.list(cachePath) + try { + val it = listing.iterator() + if (it.hasNext) { + val f = it.next() + val name = f.getFileName + name.toString match { + case PicklePattern(className) => + val bytes = Files.readAllBytes(f) + Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) + case RawPattern(className) => + val bytes = Files.readAllBytes(f) + Some(backend.ScalaRawClass(className)) + case _ => None + } + } else None + } finally { + listing.close() + } } else None case None => None } } def put(file: AbstractFile, info: ClassfileInfo): Unit = { - if (file.toString().contains("Plugin.class")) - getClass - file.underlyingSource match { case Some(jar) => val cachePath = dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 05aebaf3ca1..c233fd4ffbd 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1236,7 +1236,7 @@ trait Definitions extends api.StandardDefinitions { lazy val AnnotationDefaultAttr: ClassSymbol = { val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L) - sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym) + sym setInfo ClassInfoType(List(StaticAnnotationClass.tpe), newScope, sym) markAllCompleted(sym) RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match { case existing :: _ => From 451068eb9724d524d568946f499cb762decd71ac Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 14:22:33 +1000 Subject: [PATCH 38/66] Also store intra-project pickles on disk --- .../scala/tools/nsc/PipelineMain.scala | 213 +++++++++--------- 1 file changed, 112 insertions(+), 101 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c2f14d37f93..336c03b2788 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -10,14 +10,12 @@ import java.nio.ByteBuffer import java.nio.channels.Channels import java.nio.file.{Files, Path, Paths} import java.util.Collections -import java.util.concurrent.TimeUnit -import java.util.concurrent.locks.{Lock, ReadWriteLock} import javax.tools.ToolProvider import scala.collection.{mutable, parallel} -import scala.concurrent.duration.Duration import scala.concurrent._ +import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos import scala.reflect.io.{VirtualDirectory, VirtualFile} @@ -40,20 +38,23 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } private var reporter: Reporter = _ + private object handler extends UncaughtExceptionHandler { override def uncaughtException(t: Thread, e: Throwable): Unit = { e.printStackTrace() System.exit(-1) } } + implicit 
val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) - private class PickleClassPath[G <: Global](data: mutable.AnyRefMap[G#Symbol, PickleBuffer]) { - val dir = new VirtualDirectory("fakes", None) - val classpath = VirtualDirectoryClassPath(dir) + def pickleClassPath[G <: Global](output: AbstractFile, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): ClassPath = { + val cachePath: Path = pickleCache.cachePath(output) + Files.createDirectories(cachePath) + val dir = AbstractFile.getDirectory(cachePath.toFile) + val dirs = mutable.Map[G#Symbol, AbstractFile]() - val classInfo = mutable.Map[AbstractFile, ClassfileInfo]() def packageDir(packSymbol: G#Symbol): AbstractFile = { if (packSymbol.isEmptyPackageClass) dir else if (dirs.contains(packSymbol)) dirs(packSymbol) @@ -70,22 +71,19 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } for ((symbol, pickle) <- data) { val base = packageDir(symbol.owner) - if (symbol.isClass) { - val primary = base.fileNamed(symbol.encodedName + ".class") - classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) - if (symbol.companionModule.exists) { - val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") - classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) - } - } else if (symbol.isModule) { - val primary = base.fileNamed(symbol.encodedName + ".class") - classInfo(primary) = ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes)) - val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") - classInfo(secondary) = ScalaRawClass(symbol.companionModule.fullNameString) - } + val primary = base.fileNamed(symbol.encodedName + ".class") + pickleCache.put(primary, ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes))) + + // TODO is this needed? 
+ // if (symbol.companionModule.exists) { + // val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") + // pickleCache.put(secondary, ScalaRawClass(symbol.companionModule.fullNameString)) + // } } + DirectoryClassPath(dir.file) } - private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, PickleClassPath[_]] + + private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, ClassPath] def process(args: Array[String]): Boolean = { println(s"parallelism = $parallelism, strategy = $strategy") @@ -93,10 +91,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy reporter = new ConsoleReporter(new Settings(scalacError)) def commandFor(argFileArg: String): Task = { - val ss = new Settings(scalacError) + val ss = new Settings(scalacError) val command = new CompilerCommand(("@" + argFileArg) :: Nil, ss) Task(argFileArg, command, command.files) } + val projects: List[Task] = args.toList.map(commandFor) val produces = mutable.HashMap[Path, Task]() for (p <- projects) { @@ -111,7 +110,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy timer.start() strategy match { case OutlineTypePipeline => - projects.foreach {p => + projects.foreach { p => val isLeaf = !dependedOn.contains(p) val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) if (isLeaf) { @@ -148,7 +147,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy for (p <- projects) { val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) @@ -174,7 +175,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy for (p <- projects) { val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) @@ -200,7 +203,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy for (p <- projects) { val dependencies = dependsOn(p) + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum } if (parallelism == 1) { @@ -214,9 +219,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val trace = new java.lang.StringBuilder() trace.append("""{"traceEvents": [""") val sb = new mutable.StringBuilder(trace) + def durationEvent(name: String, cat: String, t: Timer): String = { s"""{"name": "$name", "cat": "$cat", "ph": "X", "ts": ${(t.startMicros).toLong}, "dur": ${(t.durationMicros).toLong}, "pid": 0, "tid": ${t.thread.getId}}""" } + def projectEvents(p: Task): List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { @@ -233,6 +240,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } events.result() } + 
projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") Files.write(Paths.get(s"build-${label}.trace"), trace.toString.getBytes()) @@ -246,6 +254,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val timer = new Timer val done = Promise[Unit]() } + private case class Task(argsFile: String, command: CompilerCommand, files: List[String]) { val label = argsFile.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") override def toString: String = argsFile @@ -258,10 +267,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def macroClassPathSet: Set[Path] = macroClassPath.toSet def pluginClassPath: Set[Path] = { def asPath(p: String) = ClassPath split p - val paths = command.settings.plugin.value filter (_ != "") flatMap (s => asPath(s) map (s => Paths.get(s))) + + val paths = command.settings.plugin.value filter (_ != "") flatMap (s => asPath(s) map (s => Paths.get(s))) paths.toSet } - def dependencyReadyFuture(dependency: Task) = if (macroClassPathSet.contains(dependency.outputDir)) { + def dependencyReadyFuture(dependency: Task) = if (macroClassPathSet.contains(dependency.outputDir)) { log(s"dependency is on macro classpath, will wait for .class files: ${dependency.label}") dependency.javaDone.future } else if (pluginClassPath.contains(dependency.outputDir)) { @@ -316,7 +326,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy command.settings.Ymacroexpand.value = command.settings.MacroExpand.None val run1 = new compiler.Run() run1 compile files - allPickleData.put(command.settings.outputDirs.getSingleOutput.get.file.toPath.toRealPath().normalize(), new PickleClassPath(run1.symData)) + val outDir = command.settings.outputDirs.getSingleOutput.get + allPickleData.put(outDir.file.toPath.toRealPath().normalize(), pickleClassPath(outDir, run1.symData)) outlineTimer.stop() reporter.finish() if (reporter.hasErrors) { @@ -363,7 +374,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val run2 = new compiler.Run() { override def advancePhase(): Unit = { if (compiler.phase == this.picklerPhase) { - allPickleData.put(command.settings.outputDirs.getSingleOutput.get.file.toPath.toRealPath().normalize(), new PickleClassPath(symData)) + val outDir = command.settings.outputDirs.getSingleOutput.get + allPickleData.put(outDir.file.toPath.toRealPath().normalize(), pickleClassPath(outDir, symData)) outlineTimer.stop() outlineDone.complete(Success(())) group.timer.start() @@ -373,17 +385,24 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } - run2 compile group.files - compiler.reporter.finish() - group.timer.stop() - if (compiler.reporter.hasErrors) { - log("scalac: failed") - outlineDone.complete(Failure(new RuntimeException("Compile failed"))) - group.done.complete(Failure(new RuntimeException("Compile failed"))) - } else { - log("scalac: done") -// outlineDone.complete(Success(())) - group.done.complete(Success(())) + try { + run2 compile group.files + compiler.reporter.finish() + group.timer.stop() + if (compiler.reporter.hasErrors) { + log("scalac: failed") + outlineDone.complete(Failure(new RuntimeException("Compile failed"))) + group.done.complete(Failure(new RuntimeException("Compile failed"))) + } else { + log("scalac: done") + // outlineDone.complete(Success(())) + group.done.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new 
RuntimeException("Compile failed"))) + group.done.complete(Failure(new RuntimeException("Compile failed"))) } } @@ -431,14 +450,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (strategy != Traditional) { val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { - val replacements = mutable.Buffer[PickleClassPath[_]]() + val replacements = mutable.Buffer[ClassPath]() def replaceInternalClassPath(cp: ClassPath, underlying: Path): List[ClassPath] = { allPickleData.get(underlying.toRealPath().normalize()) match { case null => cp :: Nil case pcp => replacements += pcp - pcp.classpath :: Nil + pcp :: Nil } } override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { @@ -450,15 +469,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { - file match { - case vf: VirtualFile if vf.getClass == classOf[VirtualFile] => - val iterator = replacements.iterator.flatMap(_.classInfo.get(vf)) - if (iterator.hasNext) - return Some(iterator.next()) - else None - case _ => None - } - pickleCache.get(file) } override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { @@ -473,9 +483,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } sealed abstract class BuildStrategy + /** Outline type check to compute type signatures as pickles as an input to downstream compilation. */ case object OutlineTypePipeline extends BuildStrategy + case object Pipeline extends BuildStrategy + /** Emit class files before triggering downstream compilation */ case object Traditional extends BuildStrategy @@ -497,17 +510,17 @@ object PipelineMainTest { def main(args: Array[String]): Unit = { var i = 0 for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { - i += 1 - val main = new PipelineMainClass(i.toString, n, strat) - println(s"====== ITERATION $i=======") - val result = main.process(Array( - "/code/boxer/macros/target/compile.args", - "/code/boxer/plugin/target/compile.args", - "/code/boxer/support/target/compile.args", - "/code/boxer/use-macro/target/compile.args", - "/code/boxer/use-plugin/target/compile.args")) - if (!result) - System.exit(1) + i += 1 + val main = new PipelineMainClass(i.toString, n, strat) + println(s"====== ITERATION $i=======") + val result = main.process(Array( + "/code/boxer/macros/target/compile.args", + "/code/boxer/plugin/target/compile.args", + "/code/boxer/support/target/compile.args", + "/code/boxer/use-macro/target/compile.args", + "/code/boxer/use-plugin/target/compile.args")) + if (!result) + System.exit(1) } System.exit(0) } @@ -518,52 +531,50 @@ class PickleCache(val dir: Path) { private val PicklePattern = """(.*)\.pickle""".r private val RawPattern = """(.*)\.raw""".r def get(file: AbstractFile): Option[ClassfileInfo] = synchronized { - if (file.toString().contains("Plugin.class")) - getClass - file.underlyingSource match { - case Some(jar) => - val cachePath = dir.resolve("." 
+ jar.file.toPath).normalize().resolve(file.path) - if (Files.exists(cachePath)) { - val listing = Files.list(cachePath) - try { - val it = listing.iterator() - if (it.hasNext) { - val f = it.next() - val name = f.getFileName - name.toString match { - case PicklePattern(className) => - val bytes = Files.readAllBytes(f) - Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) - case RawPattern(className) => - val bytes = Files.readAllBytes(f) - Some(backend.ScalaRawClass(className)) - case _ => None - } - } else None - } finally { - listing.close() + val cachePath = this.cachePath(file) + if (Files.exists(cachePath)) { + val listing = Files.list(cachePath) + try { + val it = listing.iterator() + if (it.hasNext) { + val f = it.next() + val name = f.getFileName + name.toString match { + case PicklePattern(className) => + val bytes = Files.readAllBytes(f) + Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) + case RawPattern(className) => + val bytes = Files.readAllBytes(f) + Some(backend.ScalaRawClass(className)) + case _ => None } } else None - case None => - None + } finally { + listing.close() + } + } else None + } + def cachePath(file: AbstractFile): Path = { + file.underlyingSource match { + case Some(jar) if jar ne file => + dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) + case _ => + dir.resolve("./" + file.path).normalize() } } + def put(file: AbstractFile, info: ClassfileInfo): Unit = { - file.underlyingSource match { - case Some(jar) => - val cachePath = dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) - info match { - case ScalaClass(className, pickle) => - Files.createDirectories(cachePath) - val ch = Channels.newChannel(Files.newOutputStream(cachePath.resolve(className + ".pickle"))) - try ch.write(pickle()) - finally ch.close() - case ScalaRawClass(className) => - Files.createDirectories(cachePath) - Files.write(cachePath.resolve(className + ".raw"), Array[Byte]()) - case _ => - } - case None => + val cachePath = this.cachePath(file) + info match { + case ScalaClass(className, pickle) => + Files.createDirectories(cachePath) + val ch = Channels.newChannel(Files.newOutputStream(cachePath.resolve(className + ".pickle"))) + try ch.write(pickle()) + finally ch.close() + case ScalaRawClass(className) => + Files.createDirectories(cachePath) + Files.write(cachePath.resolve(className + ".raw"), Array[Byte]()) + case _ => } } } \ No newline at end of file From 27d300a432f9c1ab6857076c22e1e941e70c6d4c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 14:29:35 +1000 Subject: [PATCH 39/66] Refactor --- .../scala/tools/nsc/PipelineMain.scala | 34 +++++++++++++------ 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 336c03b2788..c5eef046a0e 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -106,6 +106,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependsOn(p) = (p.classPath ++ p.pluginClassPath).flatMap(s => produces.get(s)).toList.filterNot(_ == p) } val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.toSet + val timer = new Timer timer.start() strategy match { @@ -163,12 +164,17 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => - val f1 = Future.sequence[Unit, 
List](dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) - val scalaCompiles: Future[Unit] = f1.map { _ => p.fullCompileExportPickles() } - // Start javac after scalac has completely finished - val f2 = Future.sequence[Unit, List](p.groups.map(_.done.future)) - val javaCompiles: Future[Unit] = f2.map { _ => p.javaCompile() } - scalaCompiles.flatMap(_ => javaCompiles) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) + for { + _ <- depsReady + _ <- { + p.fullCompileExportPickles() + // Start javac after scalac has completely finished + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } } Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() @@ -216,6 +222,18 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } + writeChromeTrace(projects) + deleteTempPickleCache() + true + } + + private def deleteTempPickleCache(): Unit = { + if (pickleCacheConfigured == null) { + AbstractFile.getDirectory(pickleCache.dir.toFile).delete() + } + } + + private def writeChromeTrace(projects: List[Task]) = { val trace = new java.lang.StringBuilder() trace.append("""{"traceEvents": [""") val sb = new mutable.StringBuilder(trace) @@ -244,10 +262,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") Files.write(Paths.get(s"build-${label}.trace"), trace.toString.getBytes()) - if (pickleCacheConfigured == null) { - AbstractFile.getDirectory(pickleCache.dir.toFile).delete() - } - true } case class Group(files: List[String]) { From 8ca0e9be8431cbb74232ad31739b6c1ba4650afd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 15:44:22 +1000 Subject: [PATCH 40/66] Enable classpath caching, even for directories --- .../scala/tools/nsc/PipelineMain.scala | 59 +++++++++++-------- .../scala/tools/nsc/plugins/Plugins.scala | 10 +--- 2 files changed, 37 insertions(+), 32 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c5eef046a0e..6e96f2c355e 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -8,6 +8,7 @@ import java.io.File import java.lang.Thread.UncaughtExceptionHandler import java.nio.ByteBuffer import java.nio.channels.Channels +import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} import java.util.Collections @@ -18,9 +19,8 @@ import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos -import scala.reflect.io.{VirtualDirectory, VirtualFile} import scala.tools.nsc.backend.{ClassfileInfo, ScalaClass, ScalaRawClass} -import scala.tools.nsc.classpath.{DirectoryClassPath, VirtualDirectoryClassPath, ZipArchiveFileLookup} +import scala.tools.nsc.classpath.{DirectoryClassPath, ZipArchiveFileLookup} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath @@ -49,7 +49,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) val fileManager = 
ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) - def pickleClassPath[G <: Global](output: AbstractFile, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): ClassPath = { + def registerPickleClassPath[G <: Global](output: AbstractFile, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { val cachePath: Path = pickleCache.cachePath(output) Files.createDirectories(cachePath) val dir = AbstractFile.getDirectory(cachePath.toFile) @@ -80,7 +80,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy // pickleCache.put(secondary, ScalaRawClass(symbol.companionModule.fullNameString)) // } } - DirectoryClassPath(dir.file) + val classpath = DirectoryClassPath(dir.file) + allPickleData.put(output.file.toPath.toRealPath().normalize(), classpath) } private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, ClassPath] @@ -295,7 +296,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.outlineDone.future - command.settings.YcacheMacroClassLoader.value = "none" + command.settings.YcacheMacroClassLoader.value = "last-modified" + command.settings.YcachePluginClassLoader.value = "last-modified" val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) @@ -334,22 +336,27 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def outlineCompile(): Unit = { outlineTimer.start() - log("scalac outline: start") - command.settings.Youtline.value = true - command.settings.stopAfter.value = List("pickler") - command.settings.Ymacroexpand.value = command.settings.MacroExpand.None - val run1 = new compiler.Run() - run1 compile files - val outDir = command.settings.outputDirs.getSingleOutput.get - allPickleData.put(outDir.file.toPath.toRealPath().normalize(), pickleClassPath(outDir, run1.symData)) - outlineTimer.stop() - reporter.finish() - if (reporter.hasErrors) { - log("scalac outline: failed") - outlineDone.complete(Failure(new RuntimeException("compile failed"))) - } else { - log("scalac outline: done") - outlineDone.complete(Success(())) + try { + log("scalac outline: start") + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, run1.symData) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) { + log("scalac outline: failed") + outlineDone.complete(Failure(new RuntimeException("compile failed"))) + } else { + log("scalac outline: done") + outlineDone.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new RuntimeException("compile failed"))) } } @@ -388,8 +395,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val run2 = new compiler.Run() { override def advancePhase(): Unit = { if (compiler.phase == this.picklerPhase) { - val outDir = command.settings.outputDirs.getSingleOutput.get - allPickleData.put(outDir.file.toPath.toRealPath().normalize(), pickleClassPath(outDir, symData)) + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) outlineTimer.stop() outlineDone.complete(Success(())) group.timer.start() @@ -461,6 +467,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy protected def newCompiler(settings: Settings): 
Global = { val g = Global(settings) + Files.setLastModifiedTime(settings.outputDirs.getSingleOutput.get.file.toPath, FileTime.fromMillis(System.currentTimeMillis())) if (strategy != Traditional) { val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { @@ -532,7 +539,9 @@ object PipelineMainTest { "/code/boxer/plugin/target/compile.args", "/code/boxer/support/target/compile.args", "/code/boxer/use-macro/target/compile.args", - "/code/boxer/use-plugin/target/compile.args")) + "/code/boxer/use-plugin/target/compile.args", + "/code/boxer/use-macro2/target/compile.args", + "/code/boxer/use-plugin2/target/compile.args")) if (!result) System.exit(1) } @@ -585,9 +594,11 @@ class PickleCache(val dir: Path) { val ch = Channels.newChannel(Files.newOutputStream(cachePath.resolve(className + ".pickle"))) try ch.write(pickle()) finally ch.close() + Files.setLastModifiedTime(cachePath, Files.getLastModifiedTime(file.underlyingSource.get.file.toPath)) case ScalaRawClass(className) => Files.createDirectories(cachePath) Files.write(cachePath.resolve(className + ".raw"), Array[Byte]()) + Files.setLastModifiedTime(cachePath, Files.getLastModifiedTime(file.underlyingSource.get.file.toPath)) case _ => } } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index ac8f301885b..83bd1f4a872 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -84,7 +84,7 @@ trait Plugins { global: Global => // mitigate the cost of dynamic classloading as it has been // measured in https://github.com/scala/scala-dev/issues/458. - if (disableCache || classpath.exists(!Jar.isJarOrZip(_))) { + if (disableCache) { val loader = newLoader() closeableRegistry.registerClosable(loader) loader @@ -196,13 +196,7 @@ trait Plugins { global: Global => perRunCaches.recordClassloader(newLoader()) } else { val locations = urlsAndFiles.map(t => Path(t._2.file)) - val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) - if (nonJarZips.nonEmpty) { - analyzer.macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") - perRunCaches.recordClassloader(newLoader()) - } else { - Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader, closeableRegistry) - } + Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader, closeableRegistry) } } } From 92a8ab9b5dc7a0fd320a499bd2c5430ed525acf9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Dec 2018 15:51:18 +1000 Subject: [PATCH 41/66] Add an "always" cache policy for classloaders --- src/compiler/scala/tools/nsc/PipelineMain.scala | 4 ++-- .../nsc/classpath/ZipAndJarFileLookupFactory.scala | 6 +++--- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 10 ++++++---- .../scala/tools/nsc/settings/ScalaSettings.scala | 3 ++- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 6e96f2c355e..eaddadc2aec 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -296,8 +296,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.outlineDone.future - command.settings.YcacheMacroClassLoader.value = "last-modified" - command.settings.YcachePluginClassLoader.value = "last-modified" + 
command.settings.YcacheMacroClassLoader.value = "always" + command.settings.YcachePluginClassLoader.value = "always" val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 31855236150..4d0d3505d83 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -39,7 +39,7 @@ sealed trait ZipAndJarFileLookupFactory { closeableRegistry.registerClosable(result) result } else { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry) + cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry, checkStamps = true) } } @@ -213,7 +213,7 @@ final class FileBasedCache[T] { } private val cache = collection.mutable.Map.empty[Seq[Path], Entry] - def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry): T = cache.synchronized { + def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { val stamps = paths.map { path => val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) val lastModified = attrs.lastModifiedTime() @@ -224,7 +224,7 @@ final class FileBasedCache[T] { cache.get(paths) match { case Some(e@Entry(cachedStamps, cached)) => - if (cachedStamps == stamps) { + if (!checkStamps || cachedStamps == stamps) { // Cache hit e.referenceCount.incrementAndGet() closeableRegistry.registerClosable(e.referenceCountDecrementer) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 83bd1f4a872..0146a978540 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -69,7 +69,8 @@ trait Plugins { global: Global => * @return */ protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { - val disableCache = settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name + val policy = settings.YcachePluginClassLoader.value + val disableCache = policy == settings.CachePolicy.None.name def newLoader = () => { val compilerLoader = classOf[Plugin].getClassLoader val urls = classpath map (_.toURL) @@ -88,7 +89,7 @@ trait Plugins { global: Global => val loader = newLoader() closeableRegistry.registerClosable(loader) loader - } else pluginClassLoadersCache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry) + } else pluginClassLoadersCache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, policy == settings.CachePolicy.LastModified.name) } protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList() @@ -181,7 +182,8 @@ trait Plugins { global: Global => ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) } - val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name + val policy = settings.YcacheMacroClassLoader.value + val disableCache = policy == settings.CachePolicy.None.name if (disableCache) newLoader() else { import scala.tools.nsc.io.Jar @@ -196,7 +198,7 @@ trait Plugins { global: Global => perRunCaches.recordClassloader(newLoader()) } else { val locations = urlsAndFiles.map(t => Path(t._2.file)) - 
Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader, closeableRegistry)
+          Macros.macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps = policy == settings.CachePolicy.LastModified.name)
       }
     }
   }
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 6c27c89d350..7b2c441fc52 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -260,9 +260,10 @@ trait ScalaSettings extends AbsScalaSettings
     def setting(style: String, styleLong: String) = ChoiceSetting(s"-Ycache-$style-class-loader", "policy", s"Policy for caching class loaders for $styleLong that are dynamically loaded.", values.map(_.name), None.name, values.map(_.help))
     object None extends CachePolicy("none", "Don't cache class loader")
     object LastModified extends CachePolicy("last-modified", "Cache class loader, using file last-modified time to invalidate")
+    object Always extends CachePolicy("always", "Cache class loader with no invalidation")
     // TODO Jorge to add new policy. Think about whether there is a benefit to the user on offering this as a separate policy or unifying with the previous one.
     // object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloader, using file last-modified time, then ZIP file metadata to invalidate")
-    def values: List[CachePolicy] = List(None, LastModified)
+    def values: List[CachePolicy] = List(None, LastModified, Always)
   }

   object optChoices extends MultiChoiceEnumeration {

From b3a9abe5e2a4bab7256629de9769141558ba06a3 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Tue, 25 Sep 2018 16:34:15 +1000
Subject: [PATCH 42/66] Defer the pattern matching phase until after refchecks

Continuing from #6552.

The motivation is to run the pickler phase earlier, as its intermediate
output can be used as an input to downstream compilation in a pipelined
build architecture.
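As a minimal sketch of the consumption pattern this enables (hypothetical
`ProjectBuild`/`runScalac` names; the real wiring is the `advancePhase`
override in PipelineMain, which completes a promise once the run has
passed the pickler phase):

    import scala.concurrent.{Future, Promise}
    import scala.concurrent.ExecutionContext.Implicits.global

    // Sketch only: a downstream project waits for its dependencies'
    // pickles, not for their class files, before starting to compile.
    class ProjectBuild(argsFile: String) {
      // Completed as soon as this project's run has passed pickler.
      val picklesReady: Promise[Unit] = Promise[Unit]()

      def compile(deps: List[ProjectBuild]): Future[Unit] =
        Future.sequence(deps.map(_.picklesReady.future)).map { _ =>
          runScalac(argsFile, onPicklerDone = () => picklesReady.trySuccess(()))
        }

      // Stand-in for driving Global#Run; a real implementation would invoke
      // `onPicklerDone` from an advancePhase hook once
      // compiler.phase == picklerPhase, then run the remaining phases.
      private def runScalac(argsFile: String, onPicklerDone: () => Unit): Unit =
        onPicklerDone()
    }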
(cherry picked from commit 8fd8e367ed89312a73c2d87d4543c8c012145c82) --- src/compiler/scala/tools/nsc/Global.scala | 23 ++++++++++--------- .../scala/tools/nsc/PipelineMain.scala | 3 ++- test/files/neg/t6446-additional.check | 10 ++++---- test/files/neg/t6446-missing.check | 10 ++++---- test/files/neg/t6446-show-phases.check | 10 ++++---- test/files/neg/t7494-no-options.check | 10 ++++---- test/files/run/programmatic-main.check | 10 ++++---- test/files/run/t1434.check | 3 +++ test/files/run/t6288.check | 2 +- 9 files changed, 43 insertions(+), 38 deletions(-) create mode 100644 test/files/run/t1434.check diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index efcfd8d2a35..49a00cba857 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -487,19 +487,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val global: Global.this.type = Global.this } with Analyzer - // phaseName = "patmat" - object patmat extends { - val global: Global.this.type = Global.this - val runsAfter = List("typer") - val runsRightAfter = None - // patmat doesn't need to be right after typer, as long as we run before superaccessors - // (sbt does need to run right after typer, so don't conflict) - } with PatternMatching - // phaseName = "superaccessors" object superAccessors extends { val global: Global.this.type = Global.this - val runsAfter = List("patmat") + val runsAfter = List("typer") val runsRightAfter = None } with SuperAccessors @@ -524,10 +515,20 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val runsRightAfter = None } with RefChecks + // phaseName = "patmat" + object patmat extends { + val global: Global.this.type = Global.this + // patmat does not need to run before the superaccessors phase, because + // patmat never emits `this.x` where `x` is a ParamAccessor. + // (However, patmat does need to run before outer accessors generation). 
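+    // With this ordering the phase sequence becomes: ..., superaccessors,
+    // extmethods, pickler, refchecks, patmat, uncurry, ... (see the
+    // updated *.check files below).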
+ val runsAfter = List("refchecks") + val runsRightAfter = None + } with PatternMatching + // phaseName = "uncurry" override object uncurry extends { val global: Global.this.type = Global.this - val runsAfter = List("refchecks") + val runsAfter = List("patmat") val runsRightAfter = None } with UnCurry diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index eaddadc2aec..8571017d992 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -394,7 +394,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineTimer.start() val run2 = new compiler.Run() { override def advancePhase(): Unit = { - if (compiler.phase == this.picklerPhase) { + val phase = compiler.phase + if (phase == this.picklerPhase) { registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) outlineTimer.stop() outlineDone.complete(Success(())) diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check index 9d4af37b987..5b48c1e79fa 100644 --- a/test/files/neg/t6446-additional.check +++ b/test/files/neg/t6446-additional.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check index 65b5e5dc096..d8e822cecd1 100644 --- a/test/files/neg/t6446-missing.check +++ b/test/files/neg/t6446-missing.check @@ -5,11 +5,11 @@ Error: unable to load class: t6446.Ploogin namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check index 373f63e5b25..436a0264359 100644 --- a/test/files/neg/t6446-show-phases.check +++ b/test/files/neg/t6446-show-phases.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: 
type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check index 1bf5c237119..9fd03ab0ba0 100644 --- a/test/files/neg/t7494-no-options.check +++ b/test/files/neg/t7494-no-options.check @@ -5,11 +5,11 @@ error: Error: ploogin takes no options namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index 373f63e5b25..436a0264359 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees - patmat 5 translate match expressions -superaccessors 6 add super accessors in traits and nested classes - extmethods 7 add extension methods for inline classes - pickler 8 serialize symbol tables - refchecks 9 reference/override checking, translate nested objects +superaccessors 5 add super accessors in traits and nested classes + extmethods 6 add extension methods for inline classes + pickler 7 serialize symbol tables + refchecks 8 reference/override checking, translate nested objects + patmat 9 translate match expressions uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/run/t1434.check b/test/files/run/t1434.check new file mode 100644 index 00000000000..8483ac94f7c --- /dev/null +++ b/test/files/run/t1434.check @@ -0,0 +1,3 @@ +t1434.scala:7: warning: comparing values of types Null and Null using `!=' will always yield false + case a: A[_] if(a.op != null) => "with op" + ^ diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index 86b6b95628e..955e27b0aad 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -5,7 +5,7 @@ [106][106][106]Case3.super.(); [13]() }; - [21]def unapply([29]z: [32]): [21]Option[Int] = 
[56][52][52]scala.Some.apply[[52]Int]([57]-1); + [21]def unapply([29]z: [32]): [21]Option[Int] = [56][52][52]new [52]Some[Int]([57]-1); [64]{ [64]case val x1: [64]String = [64]""; [64]case5()[84]{ From 061dbedb374fd722f4bc5b53038eb05dbb8d3470 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 11:19:35 +1000 Subject: [PATCH 43/66] Write .dot file with project dependencies --- .../scala/tools/nsc/PipelineMain.scala | 45 ++++++++++++++----- 1 file changed, 35 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 8571017d992..952ec57dc30 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -86,6 +86,26 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, ClassPath] + def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { + val builder = new java.lang.StringBuilder() + builder.append("digraph projects {\n") + for ((p, deps) <- dependsOn) { + //builder.append(" node \"[]").append(p.label).append("\";\n") + for (dep <- deps) { + builder.append(" \"").append(p.label).append("\" -> \"").append(dep.t.label).append("\" [") + if (dep.isMacro) builder.append("label=M") + else if (dep.isPlugin) builder.append("label=P") + builder.append("];\n") + } + } + builder.append("}\n") + val path = Paths.get("projects.dot") + Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + println("Wrote project dependency to: " + path) + } + + private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) + def process(args: Array[String]): Boolean = { println(s"parallelism = $parallelism, strategy = $strategy") @@ -98,15 +118,20 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } val projects: List[Task] = args.toList.map(commandFor) - val produces = mutable.HashMap[Path, Task]() + val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { produces(p.outputDir) = p } - val dependsOn = mutable.HashMap[Task, List[Task]]() + val dependsOn = mutable.LinkedHashMap[Task, List[Dependency]]() for (p <- projects) { - dependsOn(p) = (p.classPath ++ p.pluginClassPath).flatMap(s => produces.get(s)).toList.filterNot(_ == p) + val macroDeps = p.macroClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = true, isPlugin = false)) + val pluginDeps = p.pluginClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = false, isPlugin = true)) + val classPathDeps = p.classPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).filterNot(p => macroDeps.exists(_.t == p)).map(t => Dependency(t, isMacro = false, isPlugin = false)) + dependsOn(p) = classPathDeps ++ macroDeps ++ pluginDeps } - val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.toSet + val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet + + writeDotFile(dependsOn) val timer = new Timer timer.start() @@ -114,7 +139,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case OutlineTypePipeline => projects.foreach { p => val isLeaf = !dependedOn.contains(p) - val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => 
p.dependencyReadyFuture(task.t) }) if (isLeaf) { for { _ <- depsReady @@ -148,7 +173,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy timer.stop() for (p <- projects) { - val dependencies = dependsOn(p) + val dependencies = dependsOn(p).map(_.t) def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max @@ -165,7 +190,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => - val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task.t))) for { _ <- depsReady _ <- { @@ -181,7 +206,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy timer.stop() for (p <- projects) { - val dependencies = dependsOn(p) + val dependencies = dependsOn(p).map(_.t) def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max @@ -198,7 +223,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => projects.foreach { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.javaDone.future)) + val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() @@ -209,7 +234,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy timer.stop() for (p <- projects) { - val dependencies = dependsOn(p) + val dependencies = dependsOn(p).map(_.t) def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max From f2714955bd84c47e0851320f1e1f2849635989e0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 11:19:46 +1000 Subject: [PATCH 44/66] Revert "Defer the pattern matching phase until after refchecks" This reverts commit b3a9abe5e2a4bab7256629de9769141558ba06a3. 
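With patmat restored to its original slot directly after typer, the phase
listing goes back to: patmat (5), superaccessors (6), extmethods (7),
pickler (8), refchecks (9), as the .check file updates below assert. A
condensed sketch of the wiring this revert restores in Global.scala (the
full hunk follows; the global phase order is derived from these runsAfter
constraints):

    // Sketch only: patmat's predecessor constraint returns to "typer";
    // the phase assembly topologically sorts components by runsAfter.
    object patmat extends {
      val global: Global.this.type = Global.this
      val runsAfter = List("typer")
      val runsRightAfter = None
    } with PatternMatching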
--- src/compiler/scala/tools/nsc/Global.scala | 23 +++++++++---------- .../scala/tools/nsc/PipelineMain.scala | 3 +-- test/files/neg/t6446-additional.check | 10 ++++---- test/files/neg/t6446-missing.check | 10 ++++---- test/files/neg/t6446-show-phases.check | 10 ++++---- test/files/neg/t7494-no-options.check | 10 ++++---- test/files/run/programmatic-main.check | 10 ++++---- test/files/run/t1434.check | 3 --- test/files/run/t6288.check | 2 +- 9 files changed, 38 insertions(+), 43 deletions(-) delete mode 100644 test/files/run/t1434.check diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 49a00cba857..efcfd8d2a35 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -487,10 +487,19 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val global: Global.this.type = Global.this } with Analyzer + // phaseName = "patmat" + object patmat extends { + val global: Global.this.type = Global.this + val runsAfter = List("typer") + val runsRightAfter = None + // patmat doesn't need to be right after typer, as long as we run before superaccessors + // (sbt does need to run right after typer, so don't conflict) + } with PatternMatching + // phaseName = "superaccessors" object superAccessors extends { val global: Global.this.type = Global.this - val runsAfter = List("typer") + val runsAfter = List("patmat") val runsRightAfter = None } with SuperAccessors @@ -515,20 +524,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val runsRightAfter = None } with RefChecks - // phaseName = "patmat" - object patmat extends { - val global: Global.this.type = Global.this - // patmat does not need to run before the superaccessors phase, because - // patmat never emits `this.x` where `x` is a ParamAccessor. - // (However, patmat does need to run before outer accessors generation). 
- val runsAfter = List("refchecks") - val runsRightAfter = None - } with PatternMatching - // phaseName = "uncurry" override object uncurry extends { val global: Global.this.type = Global.this - val runsAfter = List("patmat") + val runsAfter = List("refchecks") val runsRightAfter = None } with UnCurry diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 952ec57dc30..0f6090a815b 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -419,8 +419,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineTimer.start() val run2 = new compiler.Run() { override def advancePhase(): Unit = { - val phase = compiler.phase - if (phase == this.picklerPhase) { + if (compiler.phase == this.picklerPhase) { registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) outlineTimer.stop() outlineDone.complete(Success(())) diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check index 5b48c1e79fa..9d4af37b987 100644 --- a/test/files/neg/t6446-additional.check +++ b/test/files/neg/t6446-additional.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees -superaccessors 5 add super accessors in traits and nested classes - extmethods 6 add extension methods for inline classes - pickler 7 serialize symbol tables - refchecks 8 reference/override checking, translate nested objects - patmat 9 translate match expressions + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check index d8e822cecd1..65b5e5dc096 100644 --- a/test/files/neg/t6446-missing.check +++ b/test/files/neg/t6446-missing.check @@ -5,11 +5,11 @@ Error: unable to load class: t6446.Ploogin namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees -superaccessors 5 add super accessors in traits and nested classes - extmethods 6 add extension methods for inline classes - pickler 7 serialize symbol tables - refchecks 8 reference/override checking, translate nested objects - patmat 9 translate match expressions + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check index 436a0264359..373f63e5b25 100644 --- a/test/files/neg/t6446-show-phases.check +++ b/test/files/neg/t6446-show-phases.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: 
type the trees -superaccessors 5 add super accessors in traits and nested classes - extmethods 6 add extension methods for inline classes - pickler 7 serialize symbol tables - refchecks 8 reference/override checking, translate nested objects - patmat 9 translate match expressions + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check index 9fd03ab0ba0..1bf5c237119 100644 --- a/test/files/neg/t7494-no-options.check +++ b/test/files/neg/t7494-no-options.check @@ -5,11 +5,11 @@ error: Error: ploogin takes no options namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees -superaccessors 5 add super accessors in traits and nested classes - extmethods 6 add extension methods for inline classes - pickler 7 serialize symbol tables - refchecks 8 reference/override checking, translate nested objects - patmat 9 translate match expressions + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index 436a0264359..373f63e5b25 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -4,11 +4,11 @@ namer 2 resolve names, attach symbols to named trees packageobjects 3 load package objects typer 4 the meat and potatoes: type the trees -superaccessors 5 add super accessors in traits and nested classes - extmethods 6 add extension methods for inline classes - pickler 7 serialize symbol tables - refchecks 8 reference/override checking, translate nested objects - patmat 9 translate match expressions + patmat 5 translate match expressions +superaccessors 6 add super accessors in traits and nested classes + extmethods 7 add extension methods for inline classes + pickler 8 serialize symbol tables + refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes fields 11 synthesize accessors and fields, add bitmaps for lazy vals tailcalls 12 replace tail calls by jumps diff --git a/test/files/run/t1434.check b/test/files/run/t1434.check deleted file mode 100644 index 8483ac94f7c..00000000000 --- a/test/files/run/t1434.check +++ /dev/null @@ -1,3 +0,0 @@ -t1434.scala:7: warning: comparing values of types Null and Null using `!=' will always yield false - case a: A[_] if(a.op != null) => "with op" - ^ diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index 955e27b0aad..86b6b95628e 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -5,7 +5,7 @@ [106][106][106]Case3.super.(); [13]() }; - [21]def unapply([29]z: [32]): [21]Option[Int] = 
[56][52][52]new [52]Some[Int]([57]-1); + [21]def unapply([29]z: [32]): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([57]-1); [64]{ [64]case val x1: [64]String = [64]""; [64]case5()[84]{ From 26e95470ce19bdb7b1463bceb9d67da60f4867e5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 13:32:06 +1000 Subject: [PATCH 45/66] Better output --- src/compiler/scala/tools/nsc/PipelineMain.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 0f6090a815b..921bfce4c00 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -101,7 +101,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy builder.append("}\n") val path = Paths.get("projects.dot") Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) - println("Wrote project dependency to: " + path) + println("Wrote project dependency graph to: " + path.toAbsolutePath) } private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) From 271fdc2b0d87090ea1be45afbaaffe2412997e18 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 18:22:15 +1000 Subject: [PATCH 46/66] Fix ref counting of cache entries --- .../classpath/ZipAndJarFileLookupFactory.scala | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 4d0d3505d83..6e7182e656b 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -24,6 +24,7 @@ import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ +import scala.reflect.internal.SymbolTable /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. @@ -203,7 +204,8 @@ final class FileBasedCache[T] { val count = referenceCount.decrementAndGet() if (count == 0) { t match { - case cl: Closeable => FileBasedCache.deferredClose(referenceCount, cl) + case cl: Closeable => + FileBasedCache.deferredClose(referenceCount, cl) case _ => } } @@ -226,9 +228,17 @@ final class FileBasedCache[T] { case Some(e@Entry(cachedStamps, cached)) => if (!checkStamps || cachedStamps == stamps) { // Cache hit - e.referenceCount.incrementAndGet() - closeableRegistry.registerClosable(e.referenceCountDecrementer) - cached + val count = e.referenceCount.incrementAndGet() + if (count == 1) { + // Closed, recreate. 
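+            // A post-increment count of 1 means the count had previously
+            // dropped to zero, at which point deferredClose closed (or is
+            // closing) the cached Closeable, so the entry must be rebuilt
+            // rather than reused.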
+ val value = create() + val entry = Entry(stamps, value) + cache.put(paths, entry) + value + } else { + closeableRegistry.registerClosable(e.referenceCountDecrementer) + cached + } } else { // Cache miss: we found an entry but the underlying files have been modified cached match { From 219377cf8752b03deac900c2ffa9d540d8899b31 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 18:23:20 +1000 Subject: [PATCH 47/66] Remove test of soon-to-be reworked classpath extension API --- .../tools/nsc/classpath/ClassPluginTest.scala | 60 ------------------- 1 file changed, 60 deletions(-) delete mode 100644 test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala diff --git a/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala b/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala deleted file mode 100644 index 346c58d9ca6..00000000000 --- a/test/junit/scala/tools/nsc/classpath/ClassPluginTest.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2018 Lightbend. All rights reserved. - */ -package scala.tools.nsc.classpath - -import java.nio.ByteBuffer - -import org.junit.Assert.assertEquals -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 - -import scala.reflect.io.VirtualDirectory -import scala.tools.nsc.backend.{ClassfileInfo, ScalaClass} -import scala.tools.nsc.io.AbstractFile -import scala.tools.nsc.symtab.SymbolTableForUnitTesting -import scala.tools.nsc.util.ClassPath -import scala.tools.testing.BytecodeTesting -import scala.tools.testing.BytecodeTesting.makeSourceFile - -@RunWith(classOf[JUnit4]) -class ClassPluginTest extends BytecodeTesting { - // We use this.compiler to generate Scala pickles... - override def compilerArgs = "-Ystop-after:pickler" - - // ... and this one to read them with a ClassPathPlugin - object symbolTable extends SymbolTableForUnitTesting { - val fakeClasses = Map( - "fake.C" -> ScalaClass("fake.C", () => pickleOf("package fake; class C { def foo = 42 }")) - ) - private val fakes = new VirtualDirectory("fakes", None) - fakes.subdirectoryNamed("fake").fileNamed("C.class") - - lazy val classpathPlugin = new platform.ClassPathPlugin { - override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { - // Add a classpath entry with the fake/C.class - VirtualDirectoryClassPath(fakes) +: classPath - } - - override def info(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] = - fakeClasses.get(clazz.fullNameString) - } - this.platform.addClassPathPlugin(classpathPlugin) - } - - @Test def classPathPluginTest(): Unit = { - import symbolTable._ - val CClass = rootMirror.getRequiredClass("fake.C") - val C_tpe = CClass.info - assertEquals("def foo: Int", definitions.fullyInitializeSymbol(C_tpe.decl(TermName("foo"))).defString) - } - - private def pickleOf(code: String): ByteBuffer = { - import compiler._ - val run = newRun - run.compileSources(makeSourceFile(code, "unitTestSource.scala") :: Nil) - val pickle = run.symData.toList.head._2 - ByteBuffer.wrap(pickle.bytes, 0, pickle.writeIndex) - } -} From 32f8458b700066d50e853a3b3f213b4a814454c4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 18:23:47 +1000 Subject: [PATCH 48/66] Add utility to extract pickles from JAR/directories --- .../scala/tools/nsc/PickleExtractor.scala | 152 ++++++++++++++++++ .../scala/tools/nsc/classpath/FileUtils.scala | 2 +- 2 files changed, 153 insertions(+), 1 deletion(-) create mode 100644 src/compiler/scala/tools/nsc/PickleExtractor.scala diff --git 
a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala new file mode 100644 index 00000000000..a9bccdaf794 --- /dev/null +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -0,0 +1,152 @@ +package scala.tools.nsc + +import java.io.Closeable +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} + +import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter} +import scala.reflect.internal.pickling.ByteCodecs +import scala.tools.asm.tree.ClassNode +import scala.tools.asm.{ClassReader, ClassWriter, Opcodes} + +object PickleExtractor { + + abstract class RootPath extends Closeable { + def root: Path + } + + def rootPath(path: Path, writable: Boolean): RootPath = { + if (path.getFileName.toString.endsWith(".jar")) { + import java.net.URI + val zipFile = URI.create("jar:file:" + path.toUri.getPath) + val env = new java.util.HashMap[String, String]() + if (!Files.exists(path.getParent)) + Files.createDirectories(path.getParent) + if (writable) { + env.put("create", "true") + if (Files.exists(path)) + Files.delete(path) + } + val zipfs = FileSystems.newFileSystem(zipFile, env) + new RootPath { + def root = zipfs.getRootDirectories.iterator().next() + def close(): Unit = { + zipfs.close() + } + } + } else { + new RootPath { + override def root: Path = path + override def close(): Unit = () + } + } + } + def main(args: Array[String]): Unit = { + args.toList match { + case input :: output :: Nil => + process(Paths.get(input), Paths.get(output)) + case _ => + } + } + def process(input: Path, output: Path): Unit = { + val inputPath = rootPath(input, writable = false) + val outputPath = rootPath(output, writable = true) + try { + val root = inputPath.root + Files.createDirectories(outputPath.root) + val visitor = new SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (dir != root) { + val outputDir = outputPath.root.resolve(root.relativize(dir).toString) + Files.createDirectories(outputDir) + } + FileVisitResult.CONTINUE + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (file.getFileName.toString.endsWith(".class")) { + stripClassFile(Files.readAllBytes(file)) match { + case Class(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString), out) + case Pickle(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString.replaceAll(".class$", ".sig")), out) + case Skip => + } + } + FileVisitResult.CONTINUE + } + } + Files.walkFileTree(root, visitor) + } finally { + inputPath.close() + outputPath.close() + } + } + + def stripClassFile(classfile: Array[Byte]): OutputFile = { + val input = new ClassNode() + new ClassReader(classfile).accept(input, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES | ClassReader.SKIP_CODE) + var output = new ClassNode() + output.name = input.name + output.access = input.access + output.version = input.version + + var foundScalaSig = false + + def isScalaAnnotation(desc: String) = (desc == "Lscala/reflect/ScalaSignature;" || desc == "Lscala/reflect/ScalaLongSignature;") && { + foundScalaSig = true + + true + } + + var pickleData: Array[Byte] = null + if (input.visibleAnnotations != null) { + input.visibleAnnotations.asScala.foreach { node => + if (node.desc == "Lscala/reflect/ScalaSignature;") { + val Array("bytes", data: String) = node.values.toArray() + val bytes = 
data.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val len = ByteCodecs.decode(bytes) + pickleData = bytes.take(len) + } else if (node.desc == "Lscala/reflect/ScalaLongSignature;") { + val Array("bytes", data: Array[String]) = node.values.toArray() + val encoded = data flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val len = ByteCodecs.decode(encoded) + pickleData = encoded.take(len) + } + } + output.visibleAnnotations = input.visibleAnnotations.asScala.filter(node => isScalaAnnotation(node.desc) && { + node; + node.values + true + }).asJava + } + var foundScalaAttr = false + if (input.attrs != null) { + output.attrs = input.attrs.asScala.filter(attr => (attr.`type` == "Scala" || attr.`type` == "ScalaSig") && { + foundScalaAttr = true; + true + }).asJava + } + val writer = new ClassWriter(Opcodes.ASM7_EXPERIMENTAL) + val isScalaRaw = foundScalaAttr && !foundScalaSig + if (isScalaRaw) Skip + else { + if (pickleData == null) { + output = input + output.accept(writer) + Class(writer.toByteArray) + } else { + output.accept(writer) + Pickle(pickleData) + } + } + } + + sealed abstract class OutputFile + + case object Skip extends OutputFile + + case class Class(content: Array[Byte]) extends OutputFile + + case class Pickle(content: Array[Byte]) extends OutputFile + +} diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index 059a83da796..aa4d8173619 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -24,7 +24,7 @@ object FileUtils { implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") + def isClass: Boolean = !file.isDirectory && (file.hasExtension("class") || file.hasExtension("sig")) def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) From 5fb3b9af38cea42a4ee137ff5c8c951cf2dced1d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Dec 2018 18:25:43 +1000 Subject: [PATCH 49/66] Support .sig files in the classpath They can either be a stripped .class file or directly contain the Scala pickle. Rework PipelineMain to use this. 
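A .sig entry is recognised by content rather than by name alone: if it
begins with the JVM classfile magic it is parsed as a (stripped)
classfile, otherwise its bytes are handed directly to the unpickler. A
minimal sketch of the dispatch added to ClassfileParser below:

    // Sketch only: 0xCAFEBABE marks a genuine classfile; a .sig file
    // without it is assumed to hold raw pickle bytes.
    val JAVA_MAGIC = 0xCAFEBABE
    def holdsRawPickle(fileName: String, firstWord: Int): Boolean =
      firstWord != JAVA_MAGIC && fileName.endsWith(".sig")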
Remove the more general extension points in ClassPathPlugin Call global.close() from PipelineMain --- .../scala/tools/nsc/PipelineMain.scala | 160 +++++++----------- .../scala/tools/nsc/backend/Platform.scala | 20 +-- .../tools/nsc/symtab/SymbolLoaders.scala | 7 +- .../symtab/classfile/ClassfileParser.scala | 62 ++----- 4 files changed, 88 insertions(+), 161 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 921bfce4c00..1a5f9c2df80 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -17,10 +17,11 @@ import javax.tools.ToolProvider import scala.collection.{mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration +import scala.reflect.internal.SymbolTable import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos -import scala.tools.nsc.backend.{ClassfileInfo, ScalaClass, ScalaRawClass} -import scala.tools.nsc.classpath.{DirectoryClassPath, ZipArchiveFileLookup} +import scala.reflect.io.PlainNioFile +import scala.tools.nsc.classpath.{ClassPathFactory, DirectoryClassPath, ZipArchiveFileLookup} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath @@ -31,6 +32,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy private val pickleCache = { new PickleCache(if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured)) } + private val strippedExternalClasspath = mutable.HashMap[Path, Path]() /** Forward errors to the (current) reporter. */ protected def scalacError(msg: String): Unit = { @@ -71,14 +73,10 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } for ((symbol, pickle) <- data) { val base = packageDir(symbol.owner) - val primary = base.fileNamed(symbol.encodedName + ".class") - pickleCache.put(primary, ScalaClass(symbol.fullNameString, () => ByteBuffer.wrap(pickle.bytes))) - - // TODO is this needed? 
- // if (symbol.companionModule.exists) { - // val secondary = base.fileNamed(symbol.companionModule.encodedName + "$.class") - // pickleCache.put(secondary, ScalaRawClass(symbol.companionModule.fullNameString)) - // } + val primary = base.fileNamed(symbol.encodedName + ".sig").file.toPath + Files.createDirectories(primary.getParent) + Files.write(primary, pickle.bytes) + Files.setLastModifiedTime(primary, Files.getLastModifiedTime(output.file.toPath)) } val classpath = DirectoryClassPath(dir.file) allPickleData.put(output.file.toPath.toRealPath().normalize(), classpath) @@ -130,6 +128,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependsOn(p) = classPathDeps ++ macroDeps ++ pluginDeps } val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet + val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p)).toSet + + for (entry <- externalClassPath) { + val extracted = pickleCache.cachePath(new PlainNioFile(entry)) + PickleExtractor.process(entry, extracted) + println(s"Exported pickles from $entry to $extracted") + strippedExternalClasspath(entry) = extracted + } writeDotFile(dependsOn) @@ -140,7 +146,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.foreach { p => val isLeaf = !dependedOn.contains(p) val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task.t) }) - if (isLeaf) { + val f = if (isLeaf) { for { _ <- depsReady _ <- { @@ -165,8 +171,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } yield { p.javaCompile() } - } + f.onComplete { _ => p.compiler.close() } } Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) @@ -191,7 +197,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case Pipeline => projects.foreach { p => val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task.t))) - for { + val f = for { _ <- depsReady _ <- { p.fullCompileExportPickles() @@ -201,7 +207,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } yield { p.javaCompile() } - } + f.onComplete { _ => p.compiler.close() } } Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() @@ -224,11 +230,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy case Traditional => projects.foreach { p => val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) - f1.flatMap { _ => + val f2 = f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) } + f2.onComplete { _ => p.compiler.close() } } Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) timer.stop() @@ -349,7 +356,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val javaDone: Promise[Unit] = Promise[Unit]() def compilationDone: Future[List[Unit]] = Future.sequence(outlineDone.future :: (groups.map(_.done.future) :+ javaDone.future)) - lazy val compiler: Global = { + lazy val compiler: Global = try { val result = newCompiler(command.settings) val reporter = result.reporter if (reporter.hasErrors) @@ -357,6 +364,10 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy else if (command.shouldStopWithInfo) 
reporter.echo(command.getInfoMessage(result)) result + } catch { + case t: Throwable => + t.printStackTrace() + throw t } def outlineCompile(): Unit = { @@ -396,15 +407,19 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy Future { log(s"scalac (${ix + 1}/$groupCount): start") val compiler2 = newCompiler(command.settings) - val run2 = new compiler2.Run() - group.timer.start() - run2 compile group.files - compiler2.reporter.finish() - group.timer.stop() - if (compiler2.reporter.hasErrors) { - group.done.complete(Failure(new RuntimeException("Compile failed"))) - } else { - group.done.complete(Success(())) + try { + val run2 = new compiler2.Run() + group.timer.start() + run2 compile group.files + compiler2.reporter.finish() + group.timer.stop() + if (compiler2.reporter.hasErrors) { + group.done.complete(Failure(new RuntimeException("Compile failed"))) + } else { + group.done.complete(Success(())) + } + } finally { + compiler2.close() } log(s"scalac (${ix + 1}/$groupCount): done") } @@ -417,20 +432,20 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val group = groups.head log("scalac: start") outlineTimer.start() - val run2 = new compiler.Run() { - override def advancePhase(): Unit = { - if (compiler.phase == this.picklerPhase) { - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) - outlineTimer.stop() - outlineDone.complete(Success(())) - group.timer.start() - log("scalac: exported pickles") + try { + val run2 = new compiler.Run() { + override def advancePhase(): Unit = { + if (compiler.phase == this.picklerPhase) { + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) + outlineTimer.stop() + outlineDone.complete(Success(())) + group.timer.start() + log("scalac: exported pickles") + } + super.advancePhase() } - super.advancePhase() } - } - try { run2 compile group.files compiler.reporter.finish() group.timer.stop() @@ -496,29 +511,27 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (strategy != Traditional) { val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { - val replacements = mutable.Buffer[ClassPath]() - def replaceInternalClassPath(cp: ClassPath, underlying: Path): List[ClassPath] = { + def replace(cp: ClassPath, underlying: Path): List[ClassPath] = { allPickleData.get(underlying.toRealPath().normalize()) match { case null => - cp :: Nil + strippedExternalClasspath.get(underlying.toRealPath().normalize()) match { + case Some(stripped) => + val replacement = ClassPathFactory.newClassPath(new PlainNioFile(stripped), g.settings, g.closeableRegistry) + replacement :: Nil + case None => + cp :: Nil + } case pcp => - replacements += pcp pcp :: Nil } } override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { - classPath.flatMap { - case zcp: ZipArchiveFileLookup[_] => replaceInternalClassPath(zcp, zcp.zipFile.toPath) - case dcp: DirectoryClassPath => replaceInternalClassPath(dcp, dcp.dir.toPath) + val modified = classPath.flatMap { + case zcp: ZipArchiveFileLookup[_] => replace(zcp, zcp.zipFile.toPath) + case dcp: DirectoryClassPath => replace(dcp, dcp.dir.toPath) case cp => cp :: Nil } - } - - override def info(file: AbstractFile, clazz: g.ClassSymbol): Option[ClassfileInfo] = { - pickleCache.get(file) - } - override def parsed(file: AbstractFile, clazz: g.ClassSymbol, info: ClassfileInfo): Unit = { - pickleCache.put(file, info) + modified } } g.platform.addClassPathPlugin(plugin) @@ 
-576,55 +589,12 @@ object PipelineMainTest { class PickleCache(val dir: Path) { - private val PicklePattern = """(.*)\.pickle""".r - private val RawPattern = """(.*)\.raw""".r - def get(file: AbstractFile): Option[ClassfileInfo] = synchronized { - val cachePath = this.cachePath(file) - if (Files.exists(cachePath)) { - val listing = Files.list(cachePath) - try { - val it = listing.iterator() - if (it.hasNext) { - val f = it.next() - val name = f.getFileName - name.toString match { - case PicklePattern(className) => - val bytes = Files.readAllBytes(f) - Some(ScalaClass(className, () => ByteBuffer.wrap(bytes))) - case RawPattern(className) => - val bytes = Files.readAllBytes(f) - Some(backend.ScalaRawClass(className)) - case _ => None - } - } else None - } finally { - listing.close() - } - } else None - } def cachePath(file: AbstractFile): Path = { file.underlyingSource match { case Some(jar) if jar ne file => - dir.resolve("." + jar.file.toPath).normalize().resolve(file.path) - case _ => - dir.resolve("./" + file.path).normalize() - } - } - - def put(file: AbstractFile, info: ClassfileInfo): Unit = { - val cachePath = this.cachePath(file) - info match { - case ScalaClass(className, pickle) => - Files.createDirectories(cachePath) - val ch = Channels.newChannel(Files.newOutputStream(cachePath.resolve(className + ".pickle"))) - try ch.write(pickle()) - finally ch.close() - Files.setLastModifiedTime(cachePath, Files.getLastModifiedTime(file.underlyingSource.get.file.toPath)) - case ScalaRawClass(className) => - Files.createDirectories(cachePath) - Files.write(cachePath.resolve(className + ".raw"), Array[Byte]()) - Files.setLastModifiedTime(cachePath, Files.getLastModifiedTime(file.underlyingSource.get.file.toPath)) + dir.resolve("." + jar.file.toPath).normalize().resolve(file.path.replaceAll(".class$", ".sig")) case _ => + dir.resolve("./" + file.path.replaceAll(".class$", ".sig")).normalize() } } } \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index 8975321d001..9e0001bf6f5 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -49,8 +49,7 @@ trait Platform { def needCompile(bin: AbstractFile, src: AbstractFile): Boolean /** - * A class path plugin can modify the classpath before it is used by the compiler, and can - * customize the way that the compiler reads the contents of class files. + * A class path plugin can modify the classpath before it is used by the compiler. 
* * Applications could include: * @@ -60,8 +59,6 @@ trait Platform { * the pickler phase ("Build Pipelining") */ abstract class ClassPathPlugin { - def info(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] - def parsed(file: AbstractFile, clazz: ClassSymbol, info: ClassfileInfo): Unit = () def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = classPath } @@ -85,19 +82,4 @@ trait Platform { if (!classPathPlugins.contains(plugin)) classPathPlugins = plugin :: classPathPlugins } - final def classFileInfo(file: AbstractFile, clazz: ClassSymbol): Option[ClassfileInfo] = if (classPathPlugins eq Nil) None else { - classPathPlugins.foldLeft(Option.empty[ClassfileInfo]) { - case (Some(info), _) => Some(info) - case (None, plugin) => plugin.info(file, clazz) - } - } - final def classFileInfoParsed(file: AbstractFile, clazz: ClassSymbol, info: ClassfileInfo): Unit = if (classPathPlugins eq Nil) None else { - classPathPlugins.foreach(_.parsed(file, clazz, info)) - } } - -sealed abstract class ClassfileInfo {} -final case class ClassBytes(data: () => ByteBuffer) extends ClassfileInfo -final case class ScalaRawClass(className: String) extends ClassfileInfo -final case class ScalaClass(className: String, pickle: () => ByteBuffer) extends ClassfileInfo - diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 2ad68f4d620..89d663b84b4 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -284,8 +284,11 @@ abstract class SymbolLoaders { val classPathEntries = classPath.list(packageName) - if (!root.isRoot) - for (entry <- classPathEntries.classesAndSources) initializeFromClassPath(root, entry) + if (!root.isRoot) { + val listing = classPathEntries.classesAndSources + for (entry <- listing) + initializeFromClassPath(root, entry) + } if (!root.isEmptyPackageClass) { for (pkg <- classPathEntries.packages) { val fullName = pkg.name diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 407073b966f..fcb1b247f07 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -17,18 +17,15 @@ package classfile import java.io._ import java.lang.Integer.toHexString -import java.nio.ByteBuffer -import scala.collection.{immutable, mutable} -import scala.collection.mutable.{ArrayBuffer, ListBuffer} import scala.annotation.switch -import scala.reflect.internal.JavaAccFlags +import scala.collection.mutable.{ArrayBuffer, ListBuffer} +import scala.collection.{immutable, mutable} +import scala.reflect.internal.{JavaAccFlags, SymbolTable} import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer} -import scala.reflect.io.{NoAbstractFile, VirtualFile} -import scala.reflect.internal.util.Collections._ -import scala.tools.nsc.backend.{ClassBytes, ScalaClass, ScalaRawClass} -import scala.tools.nsc.util.ClassPath +import scala.reflect.io.NoAbstractFile import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.util.ClassPath import scala.util.control.NonFatal /** This abstract class implements a class file parser. 
@@ -59,8 +56,9 @@ abstract class ClassfileParser { def classPath: ClassPath import definitions._ - import scala.reflect.internal.ClassfileConstants._ + import Flags._ + import scala.reflect.internal.ClassfileConstants._ protected type ThisConstantPool <: ConstantPool protected def newConstantPool: ThisConstantPool @@ -158,40 +156,16 @@ abstract class ClassfileParser { this.staticModule = module this.isScala = false - import loaders.platform._ - classFileInfo(file, clazz) match { - case Some(info) => - info match { - case ScalaRawClass(className) => - isScalaRaw = true - currentClass = TermName(className) - case ScalaClass(className, pickle) => - val pickle1 = pickle() - isScala = true - currentClass = TermName(className) - if (pickle1.hasArray) { - unpickler.unpickle(pickle1.array, pickle1.arrayOffset + pickle1.position(), clazz, staticModule, file.name) - } else { - val array = new Array[Byte](pickle1.remaining) - pickle1.get(array) - unpickler.unpickle(array, 0, clazz, staticModule, file.name) - } - case ClassBytes(data) => - val data1 = data() - val array = new Array[Byte](data1.remaining) - data1.get(array) - this.in = new AbstractFileReader(file, array) - parseHeader() - this.pool = newConstantPool - parseClass() - } - case None => - this.in = new AbstractFileReader(file) - parseHeader() - this.pool = newConstantPool - parseClass() - if (!(isScala || isScalaRaw)) - loaders.platform.classFileInfoParsed(file, clazz, ClassBytes(() => ByteBuffer.wrap(in.buf))) + this.in = new AbstractFileReader(file) + val magic = in.getInt(in.bp) + if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { + currentClass = TermName(clazz.javaClassName) + isScala = true + unpickler.unpickle(in.buf, 0, clazz, staticModule, file.name) + } else { + parseHeader() + this.pool = newConstantPool + parseClass() } } } @@ -933,7 +907,6 @@ abstract class ClassfileParser { val bytes = san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name) - loaders.platform.classFileInfoParsed(file, clazz, ScalaClass(this.currentClass.toString, () => ByteBuffer.wrap(bytes))) case None => throw new RuntimeException("Scala class file does not contain Scala annotation") } @@ -1258,7 +1231,6 @@ abstract class ClassfileParser { in.skip(attrLen) case tpnme.ScalaATTR => isScalaRaw = true - loaders.platform.classFileInfoParsed(file, clazz, ScalaRawClass(this.currentClass.toString)) case tpnme.InnerClassesATTR if !isScala => val entries = u2 for (i <- 0 until entries) { From 14e78d837d039fa431c2266beff7867fc83ef6aa Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Dec 2018 11:00:46 +1000 Subject: [PATCH 50/66] Use JARs for exported pickles, too. 
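Pickles are now written as .sig entries into a jar under the pickle cache
path, through the NIO zip filesystem wrapped by the new RootPath helper.
A self-contained sketch of that mechanism, with a hypothetical jar path
and entry name (placeholders, not part of this change):

    import java.net.URI
    import java.nio.file.{FileSystems, Files, Paths}
    import scala.collection.JavaConverters._

    // Sketch only: open (or create) a jar as a writable filesystem and
    // add one entry, mirroring what RootPath does below.
    val jar = Paths.get("/tmp/pickles.jar")
    val fs = FileSystems.newFileSystem(
      URI.create("jar:file:" + jar.toUri.getPath),
      Map("create" -> "true").asJava)
    try {
      val entry = fs.getRootDirectories.iterator().next().resolve("p").resolve("C.sig")
      Files.createDirectories(entry.getParent)
      Files.write(entry, Array[Byte](5)) // pickle bytes in practice
    } finally fs.close()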
--- .../scala/tools/nsc/PickleExtractor.scala | 37 +----- .../scala/tools/nsc/PipelineMain.scala | 108 +++++++++--------- .../nsc/classpath/DirectoryClassPath.scala | 7 +- src/reflect/scala/reflect/io/RootPath.scala | 39 +++++++ 4 files changed, 101 insertions(+), 90 deletions(-) create mode 100644 src/reflect/scala/reflect/io/RootPath.scala diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index a9bccdaf794..37d6bfae71d 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -6,41 +6,12 @@ import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter} import scala.reflect.internal.pickling.ByteCodecs +import scala.reflect.io.RootPath import scala.tools.asm.tree.ClassNode import scala.tools.asm.{ClassReader, ClassWriter, Opcodes} object PickleExtractor { - abstract class RootPath extends Closeable { - def root: Path - } - - def rootPath(path: Path, writable: Boolean): RootPath = { - if (path.getFileName.toString.endsWith(".jar")) { - import java.net.URI - val zipFile = URI.create("jar:file:" + path.toUri.getPath) - val env = new java.util.HashMap[String, String]() - if (!Files.exists(path.getParent)) - Files.createDirectories(path.getParent) - if (writable) { - env.put("create", "true") - if (Files.exists(path)) - Files.delete(path) - } - val zipfs = FileSystems.newFileSystem(zipFile, env) - new RootPath { - def root = zipfs.getRootDirectories.iterator().next() - def close(): Unit = { - zipfs.close() - } - } - } else { - new RootPath { - override def root: Path = path - override def close(): Unit = () - } - } - } def main(args: Array[String]): Unit = { args.toList match { case input :: output :: Nil => @@ -49,8 +20,8 @@ object PickleExtractor { } } def process(input: Path, output: Path): Unit = { - val inputPath = rootPath(input, writable = false) - val outputPath = rootPath(output, writable = true) + val inputPath = RootPath(input, writable = false) + val outputPath = RootPath(output, writable = true) try { val root = inputPath.root Files.createDirectories(outputPath.root) @@ -114,8 +85,6 @@ object PickleExtractor { } } output.visibleAnnotations = input.visibleAnnotations.asScala.filter(node => isScalaAnnotation(node.desc) && { - node; - node.values true }).asJava } diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 1a5f9c2df80..3a85ee12fe4 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -6,10 +6,10 @@ package scala.tools.nsc import java.io.File import java.lang.Thread.UncaughtExceptionHandler -import java.nio.ByteBuffer -import java.nio.channels.Channels import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} +import java.time.Instant +import java.util import java.util.Collections import javax.tools.ToolProvider @@ -20,7 +20,7 @@ import scala.concurrent.duration.Duration import scala.reflect.internal.SymbolTable import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.FakePos -import scala.reflect.io.PlainNioFile +import scala.reflect.io.{PlainNioFile, RootPath} import scala.tools.nsc.classpath.{ClassPathFactory, DirectoryClassPath, ZipArchiveFileLookup} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} @@ -29,10 
+29,12 @@ import scala.util.{Failure, Success} class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") - private val pickleCache = { - new PickleCache(if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured)) + private val pickleCache: Path = { + if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured) } - private val strippedExternalClasspath = mutable.HashMap[Path, Path]() + private def cachePath(file: Path): Path = pickleCache.resolve("./" + file).normalize() + + private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() /** Forward errors to the (current) reporter. */ protected def scalacError(msg: String): Unit = { @@ -50,39 +52,53 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + def changeExtension(p: Path, newExtension: String): Path = { + val fileName = p.getFileName.toString + val changedFileName = fileName.lastIndexOf('.') match { + case -1 => fileName + newExtension + case n => fileName.substring(0, n) + newExtension + } + p.getParent.resolve(changedFileName) + } - def registerPickleClassPath[G <: Global](output: AbstractFile, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { - val cachePath: Path = pickleCache.cachePath(output) - Files.createDirectories(cachePath) - val dir = AbstractFile.getDirectory(cachePath.toFile) + def registerPickleClassPath[G <: Global](output: Path, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { + val jarPath = changeExtension(cachePath(output), ".jar") + val root = RootPath(jarPath, writable = true) - val dirs = mutable.Map[G#Symbol, AbstractFile]() - def packageDir(packSymbol: G#Symbol): AbstractFile = { - if (packSymbol.isEmptyPackageClass) dir + val dirs = mutable.Map[G#Symbol, Path]() + def packageDir(packSymbol: G#Symbol): Path = { + if (packSymbol.isEmptyPackageClass) root.root else if (dirs.contains(packSymbol)) dirs(packSymbol) else if (packSymbol.owner.isRoot) { - val subDir = dir.subdirectoryNamed(packSymbol.encodedName) + val subDir = root.root.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) dirs.put(packSymbol, subDir) subDir } else { val base = packageDir(packSymbol.owner) - val subDir = base.subdirectoryNamed(packSymbol.encodedName) + val subDir = base.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) dirs.put(packSymbol, subDir) subDir } } - for ((symbol, pickle) <- data) { - val base = packageDir(symbol.owner) - val primary = base.fileNamed(symbol.encodedName + ".sig").file.toPath - Files.createDirectories(primary.getParent) - Files.write(primary, pickle.bytes) - Files.setLastModifiedTime(primary, Files.getLastModifiedTime(output.file.toPath)) + val written = new java.util.IdentityHashMap[AnyRef, Unit]() + try { + for ((symbol, pickle) <- data) { + if (!written.containsKey(pickle)) { + val base = packageDir(symbol.owner) + val primary = base.resolve(symbol.encodedName + ".sig") + Files.write(primary, pickle.bytes) + written.put(pickle, ()) + } + } + } finally { + root.close() } - val classpath = DirectoryClassPath(dir.file) - 
allPickleData.put(output.file.toPath.toRealPath().normalize(), classpath) + Files.setLastModifiedTime(jarPath, FileTime.from(Instant.now())) + strippedAndExportedClassPath.put(output.toRealPath().normalize(), jarPath) } - private val allPickleData = new java.util.concurrent.ConcurrentHashMap[Path, ClassPath] def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { val builder = new java.lang.StringBuilder() @@ -131,10 +147,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p)).toSet for (entry <- externalClassPath) { - val extracted = pickleCache.cachePath(new PlainNioFile(entry)) + val extracted = cachePath(entry) PickleExtractor.process(entry, extracted) println(s"Exported pickles from $entry to $extracted") - strippedExternalClasspath(entry) = extracted + Files.setLastModifiedTime(extracted, Files.getLastModifiedTime(entry)) + strippedAndExportedClassPath(entry) = extracted } writeDotFile(dependsOn) @@ -262,7 +279,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy private def deleteTempPickleCache(): Unit = { if (pickleCacheConfigured == null) { - AbstractFile.getDirectory(pickleCache.dir.toFile).delete() + AbstractFile.getDirectory(pickleCache.toFile).delete() } } @@ -379,7 +396,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy command.settings.Ymacroexpand.value = command.settings.MacroExpand.None val run1 = new compiler.Run() run1 compile files - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, run1.symData) + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) outlineTimer.stop() reporter.finish() if (reporter.hasErrors) { @@ -436,7 +453,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val run2 = new compiler.Run() { override def advancePhase(): Unit = { if (compiler.phase == this.picklerPhase) { - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get, symData) + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) outlineTimer.stop() outlineDone.complete(Success(())) group.timer.start() @@ -507,31 +524,24 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy protected def newCompiler(settings: Settings): Global = { val g = Global(settings) - Files.setLastModifiedTime(settings.outputDirs.getSingleOutput.get.file.toPath, FileTime.fromMillis(System.currentTimeMillis())) if (strategy != Traditional) { val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin { def replace(cp: ClassPath, underlying: Path): List[ClassPath] = { - allPickleData.get(underlying.toRealPath().normalize()) match { - case null => - strippedExternalClasspath.get(underlying.toRealPath().normalize()) match { - case Some(stripped) => - val replacement = ClassPathFactory.newClassPath(new PlainNioFile(stripped), g.settings, g.closeableRegistry) - replacement :: Nil - case None => - cp :: Nil - } - case pcp => - pcp :: Nil + strippedAndExportedClassPath.get(underlying.toRealPath().normalize()) match { + case Some(stripped) => + val replacement = ClassPathFactory.newClassPath(new PlainNioFile(stripped), g.settings, g.closeableRegistry) + replacement :: Nil + case None => + cp :: Nil } } override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = { - val modified = 
classPath.flatMap {
+        classPath.flatMap {
           case zcp: ZipArchiveFileLookup[_] => replace(zcp, zcp.zipFile.toPath)
           case dcp: DirectoryClassPath => replace(dcp, dcp.dir.toPath)
           case cp => cp :: Nil
         }
-        modified
       }
     }
     g.platform.addClassPathPlugin(plugin)
@@ -586,15 +596,3 @@ object PipelineMainTest {
     System.exit(0)
   }
 }
-
-class PickleCache(val dir: Path) {
-
-  def cachePath(file: AbstractFile): Path = {
-    file.underlyingSource match {
-      case Some(jar) if jar ne file =>
-        dir.resolve("." + jar.file.toPath).normalize().resolve(file.path.replaceAll(".class$", ".sig"))
-      case _ =>
-        dir.resolve("./" + file.path.replaceAll(".class$", ".sig")).normalize()
-    }
-  }
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
index c6ab18a1e48..9707af9b1d2 100644
--- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
+++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala
@@ -124,7 +124,12 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo
   //
   // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only
   // intended to improve determinism of the compiler for compiler hackers.
-  util.Arrays.sort(listing, (o1: File, o2: File) => o1.getName.compareTo(o2.getName))
+  util.Arrays.sort(listing, (o1: File, o2: File) => {
+    // Tolerate null entries or names from File.listFiles rather than failing the sort.
+    if (o1 == null || o1.getName == null || o2 == null || o2.getName == null) 0
+    else o1.getName.compareTo(o2.getName)
+  }
+  )
   listing
 }
 protected def getName(f: File): String = f.getName
diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala
new file mode 100644
index 00000000000..51273a9c3f3
--- /dev/null
+++ b/src/reflect/scala/reflect/io/RootPath.scala
@@ -0,0 +1,39 @@
+package scala.reflect.io
+
+import java.io.Closeable
+import java.nio
+import java.nio.file.{FileSystems, Files}
+
+
+abstract class RootPath extends Closeable {
+  def root: nio.file.Path
+}
+
+object RootPath {
+  def apply(path: nio.file.Path, writable: Boolean): RootPath = {
+    if (path.getFileName.toString.endsWith(".jar")) {
+      import java.net.URI
+      val zipFile = URI.create("jar:file:" + path.toUri.getPath)
+      val env = new java.util.HashMap[String, String]()
+      if (!Files.exists(path.getParent))
+        Files.createDirectories(path.getParent)
+      if (writable) {
+        env.put("create", "true")
+        if (Files.exists(path))
+          Files.delete(path)
+      }
+      val zipfs = FileSystems.newFileSystem(zipFile, env)
+      new RootPath {
+        def root = zipfs.getRootDirectories.iterator().next()
+        def close(): Unit = {
+          zipfs.close()
+        }
+      }
+    } else {
+      new RootPath {
+        override def root: nio.file.Path = path
+        override def close(): Unit = ()
+      }
+    }
+  }
+}
\ No newline at end of file

From 465cafbbd029448b0bafa386ff8b401b4fdd6e2c Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Thu, 6 Dec 2018 11:16:28 +1000
Subject: [PATCH 51/66] Remove ClasspathPlugin, just mutate settings.classpath

---
 .../scala/tools/nsc/PipelineMain.scala        | 57 ++++++++-----------
 .../tools/nsc/backend/JavaPlatform.scala      |  2 +-
 .../scala/tools/nsc/backend/Platform.scala    | 35 ------------
 .../symtab/SymbolTableForUnitTesting.scala    |  2 +-
 4 files changed, 27 insertions(+), 69 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala
index 3a85ee12fe4..c48e46d5329 100644
--- a/src/compiler/scala/tools/nsc/PipelineMain.scala
+++ b/src/compiler/scala/tools/nsc/PipelineMain.scala
@@ -9,7 +9,6 @@ import java.lang.Thread.UncaughtExceptionHandler
 import java.nio.file.attribute.FileTime
 import java.nio.file.{Files, Path, Paths}
 import java.time.Instant
-import java.util
 import java.util.Collections
 
 import javax.tools.ToolProvider
@@ -17,11 +16,9 @@ import javax.tools.ToolProvider
 import scala.collection.{mutable, parallel}
 import scala.concurrent._
 import scala.concurrent.duration.Duration
-import scala.reflect.internal.SymbolTable
 import scala.reflect.internal.pickling.PickleBuffer
 import scala.reflect.internal.util.FakePos
-import scala.reflect.io.{PlainNioFile, RootPath}
-import scala.tools.nsc.classpath.{ClassPathFactory, DirectoryClassPath, ZipArchiveFileLookup}
+import scala.reflect.io.RootPath
 import scala.tools.nsc.io.AbstractFile
 import scala.tools.nsc.reporters.{ConsoleReporter, Reporter}
 import scala.tools.nsc.util.ClassPath
@@ -146,12 +143,23 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
     val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet
     val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p)).toSet
-    for (entry <- externalClassPath) {
-      val extracted = cachePath(entry)
-      PickleExtractor.process(entry, extracted)
-      println(s"Exported pickles from $entry to $extracted")
-      Files.setLastModifiedTime(extracted, Files.getLastModifiedTime(entry))
-      strippedAndExportedClassPath(entry) = extracted
+    if (strategy != Traditional) {
+      val exportTimer = new Timer
+      exportTimer.start()
+      for (entry <- externalClassPath) {
+        val extracted = cachePath(entry)
+        val sourceTimeStamp = Files.getLastModifiedTime(entry)
+        if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) {
+          println(s"Skipped export of pickles from $entry to $extracted (up to date)")
+        } else {
+          PickleExtractor.process(entry, extracted)
+          Files.setLastModifiedTime(extracted, sourceTimeStamp)
+          println(s"Exported pickles from $entry to $extracted")
+        }
+        strippedAndExportedClassPath(entry) = extracted
+      }
+      exportTimer.stop()
+      println(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms")
     }
 
     writeDotFile(dependsOn)
@@ -523,32 +532,16 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
   }
 
   protected def newCompiler(settings: Settings): Global = {
-    val g = Global(settings)
-
     if (strategy != Traditional) {
-      val plugin: g.platform.ClassPathPlugin = new g.platform.ClassPathPlugin {
-        def replace(cp: ClassPath, underlying: Path): List[ClassPath] = {
-          strippedAndExportedClassPath.get(underlying.toRealPath().normalize()) match {
-            case Some(stripped) =>
-              val replacement = ClassPathFactory.newClassPath(new PlainNioFile(stripped), g.settings, g.closeableRegistry)
-              replacement :: Nil
-            case None =>
-              cp :: Nil
-          }
-        }
-        override def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = {
-          classPath.flatMap {
-            case zcp: ZipArchiveFileLookup[_] => replace(zcp, zcp.zipFile.toPath)
-            case dcp: DirectoryClassPath => replace(dcp, dcp.dir.toPath)
-            case cp => cp :: Nil
-          }
-        }
+      val classPath = ClassPath.expandPath(settings.classpath.value, expandStar = true)
+      val modifiedClassPath = classPath.map { entry =>
+        val entryPath = Paths.get(entry)
+        strippedAndExportedClassPath.getOrElse(entryPath.toRealPath().normalize(), entryPath).toString
       }
-      g.platform.addClassPathPlugin(plugin)
+      settings.classpath.value = modifiedClassPath.mkString(java.io.File.pathSeparator)
     }
-    g
+    Global(settings)
   }
-
 }
 
 sealed abstract class BuildStrategy
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 230be690c25..05396fc6ce7 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -27,7 +27,7 @@ trait JavaPlatform extends Platform {
   private[nsc] var currentClassPath: Option[ClassPath] = None
 
   private[nsc] def classPath: ClassPath = {
-    if (currentClassPath.isEmpty) currentClassPath = Some(applyClassPathPlugins(new PathResolver(settings, global.closeableRegistry).result))
+    if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings, global.closeableRegistry).result)
     currentClassPath.get
   }
 
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
index 9e0001bf6f5..c48f9f079fe 100644
--- a/src/compiler/scala/tools/nsc/backend/Platform.scala
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -47,39 +47,4 @@ trait Platform {
    * a re-compile is triggered. On .NET by contrast classfiles always take precedence.
    */
   def needCompile(bin: AbstractFile, src: AbstractFile): Boolean
-
-  /**
-   * A class path plugin can modify the classpath before it is used by the compiler.
-   *
-   * Applications could include:
-   *
-   *   - Caching the ScalaSignature annotation contents, to avoid the cost of decompressing
-   *     and parsing the classfile, akin to the OpenJDK's .sig format for stripped class files.
-   *   - Starting a downstream compilation job immediately after the upstream job has completed
-   *     the pickler phase ("Build Pipelining")
-   */
-  abstract class ClassPathPlugin {
-    def modifyClassPath(classPath: Seq[ClassPath]): Seq[ClassPath] = classPath
-  }
-
-  /** A list of registered classpath plugins */
-  private var classPathPlugins: List[ClassPathPlugin] = Nil
-
-  protected final def applyClassPathPlugins(original: ClassPath): ClassPath = {
-    val entries = original match {
-      case AggregateClassPath(entries) => entries
-      case single => single :: Nil
-    }
-    val entries1 = classPathPlugins.foldLeft(entries) {
-      (entries, plugin) => plugin.modifyClassPath(entries)
-    }
-    AggregateClassPath(entries1)
-  }
-
-
-  /** Registers a new classpath plugin */
-  final def addClassPathPlugin(plugin: ClassPathPlugin): Unit = {
-    if (!classPathPlugins.contains(plugin))
-      classPathPlugins = plugin :: classPathPlugins
-  }
 }
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index 7c4c4c522c0..e2b11cfecd2 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -36,7 +36,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
 
   def platformPhases: List[SubComponent] = Nil
 
-  private[nsc] lazy val classPath: ClassPath = applyClassPathPlugins(new PathResolver(settings, new CloseableRegistry).result)
+  private[nsc] lazy val classPath: ClassPath = new PathResolver(settings, new CloseableRegistry).result
 
   def isMaybeBoxed(sym: Symbol): Boolean = ???
   def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ???
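Note on the approach in PATCH 51: rather than hooking the compiler through a ClassPathPlugin, the classpath string itself is rewritten before Global is constructed. A minimal standalone sketch of that substitution follows; the object name ClasspathRewriteSketch is invented for illustration, and toAbsolutePath.normalize() stands in for the patch's toRealPath(), which requires the entry to exist on disk.

import java.io.File
import java.nio.file.{Path, Paths}

object ClasspathRewriteSketch {
  // Map each classpath entry to its pickle-only counterpart when one has
  // been exported; entries without a cached counterpart pass through.
  def rewrite(entries: List[String], strippedAndExported: Map[Path, Path]): String =
    entries
      .map { entry =>
        val normalized = Paths.get(entry).toAbsolutePath.normalize()
        strippedAndExported.getOrElse(normalized, normalized).toString
      }
      .mkString(File.pathSeparator)
}

In the patch the map is keyed by normalized real paths, so symlinked entries resolve to a single cache entry.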
From 8138c59628c5ee90ef5e93374f3f11efcd84b74f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Dec 2018 11:23:21 +1000 Subject: [PATCH 52/66] refactor --- .../scala/tools/nsc/PipelineMain.scala | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c48e46d5329..9ab652b8014 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -167,6 +167,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val timer = new Timer timer.start() + + def awaitDone(): Unit = { + Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) + timer.stop() + } strategy match { case OutlineTypePipeline => projects.foreach { p => @@ -201,8 +206,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy f.onComplete { _ => p.compiler.close() } } - Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) - timer.stop() + awaitDone() for (p <- projects) { val dependencies = dependsOn(p).map(_.t) @@ -233,9 +237,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } yield { p.javaCompile() } - f.onComplete { _ => p.compiler.close() } } - Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) - timer.stop() + f.onComplete { _ => p.compiler.close() } + } + awaitDone() for (p <- projects) { val dependencies = dependsOn(p).map(_.t) @@ -255,7 +259,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => projects.foreach { p => - val f1 = Future.sequence[Unit, List](dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) + val f1 = Future.sequence(dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) val f2 = f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() @@ -263,8 +267,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } f2.onComplete { _ => p.compiler.close() } } - Await.result(Future.sequence(projects.map(_.compilationDone)), Duration.Inf) - timer.stop() + awaitDone() for (p <- projects) { val dependencies = dependsOn(p).map(_.t) From be0cb1ac20cd4a63b6c40b33df60d44838cfebab Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Dec 2018 16:42:42 +1000 Subject: [PATCH 53/66] Fixup use the original classpath for javac --- .../scala/tools/nsc/PickleExtractor.scala | 6 +-- .../scala/tools/nsc/PipelineMain.scala | 50 +++++++++++-------- 2 files changed, 31 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index 37d6bfae71d..53a54b12e12 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -4,7 +4,7 @@ import java.io.Closeable import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} -import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter} +import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter, collectionAsScalaIterableConverter} import scala.reflect.internal.pickling.ByteCodecs import scala.reflect.io.RootPath import scala.tools.asm.tree.ClassNode @@ -78,8 +78,8 @@ object PickleExtractor { val len = 
ByteCodecs.decode(bytes) pickleData = bytes.take(len) } else if (node.desc == "Lscala/reflect/ScalaLongSignature;") { - val Array("bytes", data: Array[String]) = node.values.toArray() - val encoded = data flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val Array("bytes", data: java.util.Collection[String @unchecked]) = node.values.toArray() + val encoded = data.asScala.toArray flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) val len = ByteCodecs.decode(encoded) pickleData = encoded.take(len) } diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 9ab652b8014..bd9ff3d835e 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -13,6 +13,7 @@ import java.util.Collections import javax.tools.ToolProvider +import scala.collection.JavaConverters.asScalaIteratorConverter import scala.collection.{mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration @@ -24,7 +25,7 @@ import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success} -class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy) { +class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path]) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") private val pickleCache: Path = { if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured) @@ -117,18 +118,18 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) - def process(args: Array[String]): Boolean = { + def process(): Boolean = { println(s"parallelism = $parallelism, strategy = $strategy") reporter = new ConsoleReporter(new Settings(scalacError)) - def commandFor(argFileArg: String): Task = { + def commandFor(argFileArg: Path): Task = { val ss = new Settings(scalacError) val command = new CompilerCommand(("@" + argFileArg) :: Nil, ss) Task(argFileArg, command, command.files) } - val projects: List[Task] = args.toList.map(commandFor) + val projects: List[Task] = argFiles.toList.map(commandFor) val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { produces(p.outputDir) = p @@ -141,7 +142,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependsOn(p) = classPathDeps ++ macroDeps ++ pluginDeps } val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet - val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p)).toSet + val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p) && Files.exists(p)).toSet if (strategy != Traditional) { val exportTimer = new Timer @@ -150,7 +151,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val extracted = cachePath(entry) val sourceTimeStamp = Files.getLastModifiedTime(entry) if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { - println(s"Skipped export of pickles from $entry to $extracted (up to date)") + // println(s"Skipped export of pickles from $entry to $extracted (up to date)") } else { PickleExtractor.process(entry, extracted) Files.setLastModifiedTime(extracted, sourceTimeStamp) @@ -331,9 +332,9 @@ class 
PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val done = Promise[Unit]() } - private case class Task(argsFile: String, command: CompilerCommand, files: List[String]) { - val label = argsFile.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") - override def toString: String = argsFile + private case class Task(argsFile: Path, command: CompilerCommand, files: List[String]) { + val label = argsFile.toString.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + override def toString: String = argsFile.toString def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() private def expand(s: command.settings.PathSetting): List[Path] = { ClassPath.expandPath(s.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) @@ -385,6 +386,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val javaDone: Promise[Unit] = Promise[Unit]() def compilationDone: Future[List[Unit]] = Future.sequence(outlineDone.future :: (groups.map(_.done.future) :+ javaDone.future)) + val originalClassPath: String = command.settings.classpath.value + lazy val compiler: Global = try { val result = newCompiler(command.settings) val reporter = result.reporter @@ -501,7 +504,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (javaSources.nonEmpty) { javaTimer.start() javaDone.completeWith(Future { - val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + command.settings.classpath.value) + val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) compileTask.setProcessors(Collections.emptyList()) compileTask.call() @@ -539,7 +542,10 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val classPath = ClassPath.expandPath(settings.classpath.value, expandStar = true) val modifiedClassPath = classPath.map { entry => val entryPath = Paths.get(entry) - strippedAndExportedClassPath.getOrElse(entryPath.toRealPath().normalize(), entryPath).toString + if (Files.exists(entryPath)) + strippedAndExportedClassPath.getOrElse(entryPath.toRealPath().normalize(), entryPath).toString + else + entryPath } settings.classpath.value = modifiedClassPath.mkString(java.io.File.pathSeparator) } @@ -562,8 +568,14 @@ object PipelineMain { val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) - val main = new PipelineMainClass("1", parallelism, strategy) - val result = main.process(args) + val argFiles: Seq[Path] = args match { + case Array(path) if Files.isDirectory(Paths.get(path)) => + Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList + case _ => + args.map(Paths.get(_)) + } + val main = new PipelineMainClass("1", parallelism, strategy, argFiles) + val result = main.process() if (!result) System.exit(1) else @@ -574,18 +586,12 @@ object PipelineMain { object PipelineMainTest { def main(args: Array[String]): Unit = { var i 
= 0 + val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { i += 1 - val main = new PipelineMainClass(i.toString, n, strat) + val main = new PipelineMainClass(i.toString, n, strat, argsFiles) println(s"====== ITERATION $i=======") - val result = main.process(Array( - "/code/boxer/macros/target/compile.args", - "/code/boxer/plugin/target/compile.args", - "/code/boxer/support/target/compile.args", - "/code/boxer/use-macro/target/compile.args", - "/code/boxer/use-plugin/target/compile.args", - "/code/boxer/use-macro2/target/compile.args", - "/code/boxer/use-plugin2/target/compile.args")) + val result = main.process() if (!result) System.exit(1) } From 054c756879c83e33b2c235aec8883715d614f2ef Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Dec 2018 17:06:18 +1000 Subject: [PATCH 54/66] More helpful output --- src/compiler/scala/tools/nsc/PipelineMain.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index bd9ff3d835e..d7b66feef46 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -324,7 +324,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") - Files.write(Paths.get(s"build-${label}.trace"), trace.toString.getBytes()) + val traceFile = Paths.get(s"build-${label}.trace") + Files.write(traceFile, trace.toString.getBytes()) + println("Chrome trace written to " + traceFile.toAbsolutePath) } case class Group(files: List[String]) { @@ -589,7 +591,7 @@ object PipelineMainTest { val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { i += 1 - val main = new PipelineMainClass(i.toString, n, strat, argsFiles) + val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles) println(s"====== ITERATION $i=======") val result = main.process() if (!result) From c36f26d6e3670a28cbc30c5677906f124842ce3a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Dec 2018 21:17:15 +1000 Subject: [PATCH 55/66] Always extract non-internal dependencies to JARs. 
--- src/compiler/scala/tools/nsc/PipelineMain.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index d7b66feef46..4e3742762db 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -148,7 +148,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val exportTimer = new Timer exportTimer.start() for (entry <- externalClassPath) { - val extracted = cachePath(entry) + val extracted = changeExtension(cachePath(entry), ".jar") val sourceTimeStamp = Files.getLastModifiedTime(entry) if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { // println(s"Skipped export of pickles from $entry to $extracted (up to date)") From 1aaf577b8ae796ddc787a90f23991d0463b50e8e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 7 Dec 2018 11:11:19 +1000 Subject: [PATCH 56/66] Exported pickles can be put in JARs or directories --- .../scala/tools/nsc/PipelineMain.scala | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 4e3742762db..73b2388b3b6 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -25,12 +25,18 @@ import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success} -class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path]) { +class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") private val pickleCache: Path = { - if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") else Paths.get(pickleCacheConfigured) + if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") + else { + Paths.get(pickleCacheConfigured) + } + } + private def cachePath(file: Path): Path = { + val newExtension = if (useJars) ".jar" else "" + changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) } - private def cachePath(file: Path): Path = pickleCache.resolve("./" + file).normalize() private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() @@ -60,8 +66,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } def registerPickleClassPath[G <: Global](output: Path, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { - val jarPath = changeExtension(cachePath(output), ".jar") + val jarPath = cachePath(output) val root = RootPath(jarPath, writable = true) + Files.createDirectories(root.root) val dirs = mutable.Map[G#Symbol, Path]() def packageDir(packSymbol: G#Symbol): Path = { @@ -148,7 +155,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val exportTimer = new Timer exportTimer.start() for (entry <- externalClassPath) { - val extracted = changeExtension(cachePath(entry), ".jar") + val extracted = cachePath(entry) val sourceTimeStamp = Files.getLastModifiedTime(entry) if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { // println(s"Skipped export of pickles from $entry to $extracted (up to date)") @@ -570,13 +577,14 @@ object 
PipelineMain { val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) + val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") val argFiles: Seq[Path] = args match { case Array(path) if Files.isDirectory(Paths.get(path)) => Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList case _ => args.map(Paths.get(_)) } - val main = new PipelineMainClass("1", parallelism, strategy, argFiles) + val main = new PipelineMainClass("1", parallelism, strategy, argFiles, useJars) val result = main.process() if (!result) System.exit(1) @@ -591,7 +599,7 @@ object PipelineMainTest { val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { i += 1 - val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles) + val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = true) println(s"====== ITERATION $i=======") val result = main.process() if (!result) From 1469604c68849be9eccfa0bac510c1ddfe45ba30 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 10 Dec 2018 12:49:15 +1000 Subject: [PATCH 57/66] Disable classloader caching for now --- src/compiler/scala/tools/nsc/PipelineMain.scala | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 73b2388b3b6..5603d33442a 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -367,9 +367,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.outlineDone.future - command.settings.YcacheMacroClassLoader.value = "always" - command.settings.YcachePluginClassLoader.value = "always" - val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) if (strategy != OutlineTypePipeline || isScalaLibrary) { From 02039c9a02ed4a11b7044107be0947b8ffe54284 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 10 Dec 2018 13:38:05 +1000 Subject: [PATCH 58/66] Another shot at fixing classloader caching --- .../scala/tools/nsc/PipelineMain.scala | 2 +- .../ZipAndJarFileLookupFactory.scala | 73 +++++++++---------- 2 files changed, 35 insertions(+), 40 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 5603d33442a..4a22ec5eced 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -593,7 +593,7 @@ object PipelineMain { object PipelineMainTest { def main(args: Array[String]): Unit = { var i = 0 - val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList + val argsFiles = Files.walk(Paths.get("/code/boxer")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { i += 1 val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = true) diff --git 
a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6e7182e656b..3a72804080e 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -196,24 +196,40 @@ final class FileBasedCache[T] { private case class Stamp(lastModified: FileTime, fileKey: Object) private case class Entry(stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) - def referenceCountDecrementer: Closeable = new Closeable { - var closed = false - override def close(): Unit = { - if (!closed) { - closed = true - val count = referenceCount.decrementAndGet() - if (count == 0) { - t match { - case cl: Closeable => - FileBasedCache.deferredClose(referenceCount, cl) - case _ => - } + } + private val cache = collection.mutable.Map.empty[Seq[Path], Entry] + + private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = new Closeable { + var closed = false + override def close(): Unit = { + if (!closed) { + closed = true + val count = e.referenceCount.decrementAndGet() + if (count == 0) { + e.t match { + case cl: Closeable => + FileBasedCache.timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + cache.synchronized { + if (e.referenceCount.compareAndSet(0, -1)) { + cache.remove(paths) + cl.close() + } + } + } + } + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + cl.close() + } + case _ => } } } } } - private val cache = collection.mutable.Map.empty[Seq[Path], Entry] def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { val stamps = paths.map { path => @@ -229,16 +245,9 @@ final class FileBasedCache[T] { if (!checkStamps || cachedStamps == stamps) { // Cache hit val count = e.referenceCount.incrementAndGet() - if (count == 1) { - // Closed, recreate. 
- val value = create() - val entry = Entry(stamps, value) - cache.put(paths, entry) - value - } else { - closeableRegistry.registerClosable(e.referenceCountDecrementer) - cached - } + assert(count > 0, (stamps, count)) + closeableRegistry.registerClosable(referenceCountDecrementer(e, paths)) + cached } else { // Cache miss: we found an entry but the underlying files have been modified cached match { @@ -252,7 +261,7 @@ final class FileBasedCache[T] { val value = create() val entry = Entry(stamps, value) cache.put(paths, entry) - closeableRegistry.registerClosable(entry.referenceCountDecrementer) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) value } case _ => @@ -260,7 +269,7 @@ final class FileBasedCache[T] { val value = create() val entry = Entry(stamps, value) cache.put(paths, entry) - closeableRegistry.registerClosable(entry.referenceCountDecrementer) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) value } } @@ -279,18 +288,4 @@ object FileBasedCache { Some(new java.util.Timer(true)) else None } - private def deferredClose(referenceCount: AtomicInteger, closable: Closeable): Unit = { - timer match { - case Some(timer) => - val task = new TimerTask { - override def run(): Unit = { - if (referenceCount.get == 0) - closable.close() - } - } - timer.schedule(task, FileBasedCache.deferCloseMs.toLong) - case None => - closable.close() - } - } } From 640b07ca0b0694b96a304f012299b87c753a2a42 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 10 Dec 2018 13:40:00 +1000 Subject: [PATCH 59/66] Enable macro/plugin classloader caching with an opt-in property --- src/compiler/scala/tools/nsc/PipelineMain.scala | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 4a22ec5eced..beb4a7591e8 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -367,6 +367,13 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.outlineDone.future + val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") + val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") + if (cacheMacro) + command.settings.YcacheMacroClassLoader.value = "always" + if (cachePlugin) + command.settings.YcachePluginClassLoader.value = "always" + val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) if (strategy != OutlineTypePipeline || isScalaLibrary) { From 29c9cbc9e50fd90039404f1ed6c99daadbf4a322 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 11 Dec 2018 18:04:06 +1000 Subject: [PATCH 60/66] WIP Support ident references in Java code to static classes declared in a base class --- .../scala/tools/nsc/typechecker/Contexts.scala | 17 ++++++++++++++--- .../pos/java-ref-super-class-static/J.java | 12 ++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/java-ref-super-class-static/J.java diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index ad643bc9a9f..59d2d06356b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1072,14 +1072,25 @@ trait Contexts { self: Analyzer => || unit.exists && s.sourceFile != unit.source.file ) ) - def lookupInPrefix(name: Name) = { + def 
lookupInPrefix(name: Name): Symbol = {
       val sym = pre.member(name).filter(qualifies)
       def isNonPackageNoModuleClass(sym: Symbol) = sym.isClass && !sym.isModuleClass && !sym.isPackageClass
       if (!sym.exists && unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) {
+        var baseClasses = pre.baseClasses
+        while (baseClasses != Nil) {
+          val base = baseClasses.head
+          val pre1 = companionSymbolOf(base, this).typeOfThis
+          val sym = pre1.member(name).filter(qualifies)
+          // Found in the companion of a base class: record the new prefix and return.
+          if (sym != NoSymbol) {
+            pre = pre1
+            return sym
+          }
+          baseClasses = baseClasses.tail
+        }
+        NoSymbol
         // TODO factor out duplication with Typer::inCompanionForJavaStatic
-        val pre1 = companionSymbolOf(pre.typeSymbol, this).typeOfThis
-        pre1.member(name).filter(qualifies).andAlso(_ => pre = pre1)
       } else sym
     }
     def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
diff --git a/test/files/pos/java-ref-super-class-static/J.java b/test/files/pos/java-ref-super-class-static/J.java
new file mode 100644
index 00000000000..0bd7545e6e9
--- /dev/null
+++ b/test/files/pos/java-ref-super-class-static/J.java
@@ -0,0 +1,12 @@
+class J {
+  static class Base {
+    static class StaticInner {}
+    class Inner {}
+  }
+  static class Sub extends Base {
+    void f1(Inner inner) {}
+
+    void f2(StaticInner inner) {} // not found: "StaticInner"
+    void f3(Base.StaticInner inner) {} // workaround: qualify
+  }
+}

From b74afb4c5d3f88c6acd5a0536e3ce4a612f25fa0 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 12 Dec 2018 10:11:55 +1000
Subject: [PATCH 61/66] Avoid quadratic performance wrt classpath length

---
 .../scala/tools/nsc/PipelineMain.scala        | 26 +++++++++----------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala
index beb4a7591e8..e2391f47c75 100644
--- a/src/compiler/scala/tools/nsc/PipelineMain.scala
+++ b/src/compiler/scala/tools/nsc/PipelineMain.scala
@@ -184,7 +184,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
       case OutlineTypePipeline =>
         projects.foreach { p =>
           val isLeaf = !dependedOn.contains(p)
-          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task.t) })
+          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) })
           val f = if (isLeaf) {
             for {
               _ <- depsReady
@@ -234,7 +234,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
         println(f"  Wall Clock: ${timer.durationMs}%.0f ms")
       case Pipeline =>
         projects.foreach { p =>
-          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task.t)))
+          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).iterator.map(task => p.dependencyReadyFuture(task)))
           val f = for {
             _ <- depsReady
             _ <- {
@@ -348,23 +348,23 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
     private def expand(s: command.settings.PathSetting): List[Path] = {
       ClassPath.expandPath(s.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize())
     }
-    def classPath: Seq[Path] = expand(command.settings.classpath)
-    def macroClassPath: Seq[Path] = expand(command.settings.YmacroClasspath)
-    def macroClassPathSet: Set[Path] = macroClassPath.toSet
-    def pluginClassPath: Set[Path] = {
+    lazy val classPath: Seq[Path] = expand(command.settings.classpath)
+    lazy val macroClassPath: Seq[Path] = expand(command.settings.YmacroClasspath)
+    lazy
val macroClassPathSet: Set[Path] = macroClassPath.toSet + lazy val pluginClassPath: Set[Path] = { def asPath(p: String) = ClassPath split p val paths = command.settings.plugin.value filter (_ != "") flatMap (s => asPath(s) map (s => Paths.get(s))) paths.toSet } - def dependencyReadyFuture(dependency: Task) = if (macroClassPathSet.contains(dependency.outputDir)) { - log(s"dependency is on macro classpath, will wait for .class files: ${dependency.label}") - dependency.javaDone.future - } else if (pluginClassPath.contains(dependency.outputDir)) { - log(s"dependency is on plugin classpath, will wait for .class files: ${dependency.label}") - dependency.javaDone.future + def dependencyReadyFuture(dependency: Dependency) = if (dependency.isMacro) { + log(s"dependency is on macro classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future + } else if (dependency.isPlugin) { + log(s"dependency is on plugin classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future } else - dependency.outlineDone.future + dependency.t.outlineDone.future val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") From 47ccc230acb58c2df48bbc209ba9777a45849e88 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 12 Dec 2018 17:08:33 +1000 Subject: [PATCH 62/66] Fix error reporting, add diagnostic for any hangs that remain --- .../scala/tools/nsc/PipelineMain.scala | 78 +++++++++++++++---- 1 file changed, 64 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index e2391f47c75..0ae4a05f308 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -10,11 +10,12 @@ import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} import java.time.Instant import java.util.Collections +import java.util.concurrent.atomic.AtomicInteger import javax.tools.ToolProvider import scala.collection.JavaConverters.asScalaIteratorConverter -import scala.collection.{mutable, parallel} +import scala.collection.{immutable, mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer @@ -23,7 +24,7 @@ import scala.reflect.io.RootPath import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath -import scala.util.{Failure, Success} +import scala.util.{Failure, Success, Try} class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") @@ -137,6 +138,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } val projects: List[Task] = argFiles.toList.map(commandFor) + val numProjects = projects.size val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { produces(p.outputDir) = p @@ -176,9 +178,49 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val timer = new Timer timer.start() + def awaitAll(fs: Seq[Future[_]]): Future[_] = { + val done = Promise[Any]() + val allFutures = projects.flatMap(_.futures) + val count = allFutures.size + val counter = new AtomicInteger(count) + val handler = (a: Try[_]) => a match { + case f @ Failure(_) => + done.complete(f) + case Success(_) => + val remaining = 
counter.decrementAndGet()
+            if (remaining == 0) done.success(())
+      }
+
+      allFutures.foreach(_.onComplete(handler))
+      done.future
+    }
+    def awaitDone(): Unit = {
+      val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures)
+      val numAllFutures = allFutures.size
+      val awaitAllFutures: Future[_] = awaitAll(allFutures)
+      // Await with a timeout so that a stalled build can be diagnosed below.
+      var lastNumCompleted = allFutures.count(_.isCompleted)
+      while (true) try {
+        Await.result(awaitAllFutures, Duration(60, "s"))
+        timer.stop()
+        return
+      } catch {
+        case _: TimeoutException =>
+          val numCompleted = allFutures.count(_.isCompleted)
+          if (numCompleted == lastNumCompleted) {
+            println(s"STALLED: $numCompleted / $numAllFutures")
+            println("Outline/Scala/Javac")
+            projects.foreach {
+              p =>
+                def toX(b: Future[_]): String = b.value match { case None => "-"; case Some(Success(_)) => "x"; case Some(Failure(_)) => "!" }
+                val s = List(p.outlineDoneFuture, p.groupsDoneFuture, p.javaDoneFuture).map(toX).mkString(" ")
+                println(s + " " + p.label)
+            }
+          } else {
+            println(s"PROGRESS: $numCompleted / $numAllFutures")
+          }
+      }
     }
     strategy match {
       case OutlineTypePipeline =>
@@ -234,7 +276,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
         println(f"  Wall Clock: ${timer.durationMs}%.0f ms")
       case Pipeline =>
         projects.foreach { p =>
-          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).iterator.map(task => p.dependencyReadyFuture(task)))
+          val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task)))
           val f = for {
             _ <- depsReady
             _ <- {
@@ -396,8 +438,13 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
     var regularCriticalPathMs = 0d
     var fullCriticalPathMs = 0d
     val outlineDone: Promise[Unit] = Promise[Unit]()
+    val outlineDoneFuture = outlineDone.future
     val javaDone: Promise[Unit] = Promise[Unit]()
-    def compilationDone: Future[List[Unit]] = Future.sequence(outlineDone.future :: (groups.map(_.done.future) :+ javaDone.future))
+    val javaDoneFuture: Future[_] = javaDone.future
+    val groupsDoneFuture: Future[List[Unit]] = Future.sequence(groups.map(_.done.future))
+    val futures: List[Future[_]] = {
+      outlineDone.future :: javaDone.future :: groups.map(_.done.future)
+    }
 
     val originalClassPath: String = command.settings.classpath.value
 
@@ -429,7 +476,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
           reporter.finish()
           if (reporter.hasErrors) {
             log("scalac outline: failed")
-            outlineDone.complete(Failure(new RuntimeException("compile failed")))
+            outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: ")))
           } else {
             log("scalac outline: done")
             outlineDone.complete(Success(()))
@@ -437,7 +484,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
       } catch {
         case t: Throwable =>
           t.printStackTrace()
-          outlineDone.complete(Failure(new RuntimeException("compile failed")))
+          outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: ")))
       }
     }
 
@@ -459,7 +506,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
             compiler2.reporter.finish()
             group.timer.stop()
             if (compiler2.reporter.hasErrors) {
-              group.done.complete(Failure(new RuntimeException("Compile failed")))
+              group.done.complete(Failure(new RuntimeException(label + ": compile failed: ")))
             } else {
               group.done.complete(Success(()))
             }
@@ -496,8 +543,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
         group.timer.stop()
         if (compiler.reporter.hasErrors) {
           log("scalac: failed")
-          outlineDone.complete(Failure(new RuntimeException("Compile failed")))
-          group.done.complete(Failure(new RuntimeException("Compile failed")))
+          if (!outlineDone.isCompleted)
+            outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: ")))
+          group.done.complete(Failure(new RuntimeException(label + ": compile failed: ")))
         } else {
           log("scalac: done")
           // outlineDone.complete(Success(()))
@@ -506,8 +554,10 @@ class
PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy group.timer.stop() if (compiler.reporter.hasErrors) { log("scalac: failed") - outlineDone.complete(Failure(new RuntimeException("Compile failed"))) - group.done.complete(Failure(new RuntimeException("Compile failed"))) + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { log("scalac: done") // outlineDone.complete(Success(())) @@ -506,8 +554,10 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } catch { case t: Throwable => t.printStackTrace() - outlineDone.complete(Failure(new RuntimeException("Compile failed"))) - group.done.complete(Failure(new RuntimeException("Compile failed"))) + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + if (!group.done.isCompleted) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } } @@ -601,7 +651,7 @@ object PipelineMainTest { def main(args: Array[String]): Unit = { var i = 0 val argsFiles = Files.walk(Paths.get("/code/boxer")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList - for (_ <- 1 to 10; n <- List(parallel.availableProcessors); strat <- List(Pipeline, OutlineTypePipeline, Traditional)) { + for (_ <- 1 to 1; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) { i += 1 val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = true) println(s"====== ITERATION $i=======") From b81adfdc70ff8a4b5ac258f9480b1d1aed6f9213 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 12 Dec 2018 18:08:54 +1000 Subject: [PATCH 63/66] Print final progress, stop timer --- src/compiler/scala/tools/nsc/PipelineMain.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 0ae4a05f308..c6a37936702 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -204,6 +204,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy while (true) try { Await.result(awaitAllFutures, Duration(60, "s")) timer.stop() + val numCompleted = allFutures.count(_.isCompleted) + println(s"PROGRESS: $numCompleted / $numAllFutures") return } catch { case _: TimeoutException => From a2a051a607e23880cd6163df882ad29c3c870a7e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 12 Dec 2018 22:40:40 +1000 Subject: [PATCH 64/66] Separate timer for pickle export and show all times in "done" log messages --- .../scala/tools/nsc/PipelineMain.scala | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c6a37936702..dc46ab8d2e6 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -361,6 +361,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (p.outlineTimer.durationMicros > 0d) { val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) + events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) } for ((g, ix) <- p.groups.zipWithIndex) { if (g.timer.durationMicros > 
0d) @@ -434,6 +435,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val isGrouped = groups.size > 1 val outlineTimer = new Timer() + val pickleExportTimer = new Timer val javaTimer = new Timer() var outlineCriticalPathMs = 0d @@ -480,7 +482,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy log("scalac outline: failed") outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { - log("scalac outline: done") + log(f"scala outline: done ${outlineTimer.durationMs}%.0f ms") outlineDone.complete(Success(())) } } catch { @@ -500,13 +502,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy group.done.completeWith { Future { log(s"scalac (${ix + 1}/$groupCount): start") + group.timer.start() val compiler2 = newCompiler(command.settings) try { val run2 = new compiler2.Run() - group.timer.start() run2 compile group.files compiler2.reporter.finish() - group.timer.stop() if (compiler2.reporter.hasErrors) { group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { @@ -514,8 +515,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } finally { compiler2.close() + group.timer.stop() } - log(s"scalac (${ix + 1}/$groupCount): done") + log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") } } } @@ -528,13 +530,16 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineTimer.start() try { val run2 = new compiler.Run() { + override def advancePhase(): Unit = { if (compiler.phase == this.picklerPhase) { - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) outlineTimer.stop() + pickleExportTimer.start() + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) + pickleExportTimer.stop() + log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") outlineDone.complete(Success(())) group.timer.start() - log("scalac: exported pickles") } super.advancePhase() } @@ -549,7 +554,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { - log("scalac: done") + log(f"scalac: done ${group.timer.durationMs}%.0f ms") // outlineDone.complete(Success(())) group.done.complete(Success(())) } @@ -564,9 +569,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } def javaCompile(): Unit = { - log("javac: start") val javaSources = files.filter(_.endsWith(".java")) if (javaSources.nonEmpty) { + log("javac: start") javaTimer.start() javaDone.completeWith(Future { val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) @@ -574,12 +579,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy compileTask.setProcessors(Collections.emptyList()) compileTask.call() javaTimer.stop() + log(f"javac: done ${javaTimer.durationMs}%.0f ms") () }) } else { javaDone.complete(Success(())) } - log("javac: done") } def log(msg: String): Unit = println(this.label + ": " + msg) } @@ -597,7 +602,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy endNanos = System.nanoTime() } def startMs: Double = startNanos.toDouble 
/ 1000 / 1000
-  def durationMs: Double = (endNanos - startNanos).toDouble / 1000 / 1000
+  def durationMs: Double = {
+    val result = (endNanos - startNanos).toDouble / 1000 / 1000
+    // A negative duration means stop() ran without a matching start(); report zero.
+    if (result < 0) 0d
+    else result
+  }
   def startMicros: Double = startNanos.toDouble / 1000d
   def durationMicros: Double = (endNanos - startNanos).toDouble / 1000d
 }
@@ -652,10 +662,10 @@ object PipelineMain {
 object PipelineMainTest {
   def main(args: Array[String]): Unit = {
     var i = 0
-    val argsFiles = Files.walk(Paths.get("/code/boxer")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList
-    for (_ <- 1 to 1; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) {
+    val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList
+    for (_ <- 1 to 2; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) {
       i += 1
-      val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = true)
+      val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = false)
       println(s"====== ITERATION $i=======")
       val result = main.process()
       if (!result)

From e4bf7e814a065031e7d958a3b09c5571edb69125 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 12 Dec 2018 22:54:24 +1000
Subject: [PATCH 65/66] Disable pickle export for leaf projects

---
 src/compiler/scala/tools/nsc/PipelineMain.scala | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala
index dc46ab8d2e6..5ed0c7572ac 100644
--- a/src/compiler/scala/tools/nsc/PipelineMain.scala
+++ b/src/compiler/scala/tools/nsc/PipelineMain.scala
@@ -282,7 +282,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
           val f = for {
             _ <- depsReady
             _ <- {
-              p.fullCompileExportPickles()
+              val isLeaf = !dependedOn.contains(p)
+              if (isLeaf) {
+                p.outlineDone.complete(Success(()))
+                p.fullCompile()
+              } else
+                p.fullCompileExportPickles()
               // Start javac after scalac has completely finished
               Future.sequence(p.groups.map(_.done.future))
             }

From 7d60e740253db31871a4ce28886d0ed0716b413c Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 12 Dec 2018 22:59:38 +1000
Subject: [PATCH 66/66] Output outline done status and duration

---
 src/compiler/scala/tools/nsc/PipelineMain.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala
index 5ed0c7572ac..84ed48a2330 100644
--- a/src/compiler/scala/tools/nsc/PipelineMain.scala
+++ b/src/compiler/scala/tools/nsc/PipelineMain.scala
@@ -539,6 +539,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy
           override def advancePhase(): Unit = {
             if (compiler.phase == this.picklerPhase) {
               outlineTimer.stop()
+              log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms")
               pickleExportTimer.start()
               registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData)
               pickleExportTimer.stop()
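The timing figures printed by the last few patches come from the Timer helper whose pieces are visible above. Below is a minimal reconstruction, assuming one start()/stop() pair per measurement; the field and method names follow the fragments shown, and anything beyond them is inferred rather than taken from the original source.

final class Timer {
  private var startNanos: Long = 0L
  private var endNanos: Long = 0L
  def start(): Unit = startNanos = System.nanoTime()
  def stop(): Unit = endNanos = System.nanoTime()
  // Durations are derived from the two System.nanoTime samples.
  def startMs: Double = startNanos.toDouble / 1000 / 1000
  def durationMs: Double = (endNanos - startNanos).toDouble / 1000 / 1000
  def startMicros: Double = startNanos.toDouble / 1000d
  def durationMicros: Double = (endNanos - startNanos).toDouble / 1000d
}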