diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index a5564242ebf3..4a4003066a70 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -109,11 +109,11 @@ object ScriptCommands { Project.setProject(session, newStructure, state) } - private[this] val enableOptimizer = Seq( + val enableOptimizer = Seq( scalacOptions in Compile in ThisBuild ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) - private[this] val noDocs = Seq( + val noDocs = Seq( publishArtifact in (Compile, packageDoc) in ThisBuild := false ) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d43564e44e3a..a4a877a18d0f 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -438,8 +438,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) currentRun.informUnitStarting(this, unit) val unit0 = currentUnit currentRun.currentUnit = unit + currentRun.profiler.beforeUnit(phase, unit.source.file) try apply(unit) finally { + currentRun.profiler.afterUnit(phase, unit.source.file) currentRun.currentUnit = unit0 currentRun.advanceUnit() } @@ -1100,6 +1102,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit) + override protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.beforeCompletion(root, associatedFile) + override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.afterCompletion(root, associatedFile) + /** A Run is a single execution of the compiler on a set of units. 
*/ class Run extends RunContextApi with RunReporting with RunParsing { @@ -1448,7 +1453,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private final val GlobalPhaseName = "global (synthetic)" protected final val totalCompileTime = statistics.newTimer("#total compile time", GlobalPhaseName) - def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) + def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { units foreach addUnit reporter.reset() diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0ba7dad971d3..6fd08a481f12 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -352,7 +352,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { }) val selfParam = ValDef(selfParamSym) val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // scala/scala-dev#186 intentionally leaving Ident($this) is unpositioned - .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym) + .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym, newSym) treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 4885083938e9..b10b9bb68784 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -932,7 +932,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } def genLoadArguments(args: List[Tree], btpes: List[BType]) { - (args zip btpes) foreach { case (arg, btpe) => genLoad(arg, btpe) } + foreach2(args, btpes) { case (arg, btpe) => genLoad(arg, btpe) } } def genLoadModule(tree: Tree): BType = { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index f44bd0b58ffd..9877076c25dc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -789,7 +789,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { if (needsAnnotation) { val c = Constant(definitions.RemoteExceptionClass.tpe) val arg = Literal(c) setType c.tpe - meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe), arg) + meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe :: Nil), arg) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index d2d1139a519a..b35796f6f736 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -6,7 +6,8 @@ package scala.tools.nsc package backend.jvm -import scala.collection.{concurrent, mutable} +import java.{util => ju} +import scala.collection.concurrent import scala.tools.asm import scala.tools.asm.Opcodes import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, InternalName} @@ -23,7 +24,7 @@ import scala.tools.nsc.backend.jvm.opt._ */ abstract class BTypes { val frontendAccess: PostProcessorFrontendAccess - import 
frontendAccess.{frontendSynch, recordPerRunCache} + import frontendAccess.{frontendSynch, recordPerRunJavaMapCache} val coreBTypes: CoreBTypes { val bTypes: BTypes.this.type } import coreBTypes._ @@ -35,13 +36,15 @@ abstract class BTypes { * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal * name. The method assumes that every class type that appears in the bytecode exists in the map */ - def cachedClassBType(internalName: InternalName): Option[ClassBType] = + // OPT: not returning Option[ClassBType] because the Some allocation shows up as a hotspot + def cachedClassBType(internalName: InternalName): ClassBType = classBTypeCache.get(internalName) // Concurrent maps because stack map frames are computed when in the class writer, which // might run on multiple classes concurrently. // Note usage should be private to this file, except for tests - val classBTypeCache: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(FlatConcurrentHashMap.empty) + val classBTypeCache: ju.concurrent.ConcurrentHashMap[InternalName, ClassBType] = + recordPerRunJavaMapCache(new ju.concurrent.ConcurrentHashMap[InternalName, ClassBType]) /** * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType @@ -809,17 +812,23 @@ abstract class BTypes { def unapply(cr:ClassBType) = Some(cr.internalName) def apply(internalName: InternalName, fromSymbol: Boolean)(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { - val newRes = if (fromSymbol) new ClassBTypeFromSymbol(internalName) else new ClassBTypeFromClassfile(internalName) - // synchronized s required to ensure proper initialisation if info. - // see comment on def info - newRes.synchronized { - classBTypeCache.putIfAbsent(internalName, newRes) match { - case None => - newRes._info = init(newRes) - newRes.checkInfoConsistency() - newRes - case Some(old) => - old + val cached = classBTypeCache.get(internalName) + if (cached ne null) cached + else { + val newRes = + if (fromSymbol) new ClassBTypeFromSymbol(internalName) + else new ClassBTypeFromClassfile(internalName) + // synchronized is required to ensure proper initialisation of info. + // see comment on def info + newRes.synchronized { + classBTypeCache.putIfAbsent(internalName, newRes) match { + case null => + newRes._info = init(newRes) + newRes.checkInfoConsistency() + newRes + case old => + old + } } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index 095e5911313a..cd5f74519df9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -46,12 +46,10 @@ abstract class BTypesFromClassfile { * be found in the `byteCodeRepository`, the `info` of the resulting ClassBType is undefined. 
*/ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { - cachedClassBType(internalName).getOrElse{ - ClassBType(internalName, false){ res:ClassBType => - byteCodeRepository.classNode(internalName) match { - case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) - case Right(c) => computeClassInfoFromClassNode(c, res) - } + ClassBType(internalName, fromSymbol = false) { res: ClassBType => + byteCodeRepository.classNode(internalName) match { + case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) + case Right(c) => computeClassInfoFromClassNode(c, res) } } } @@ -60,10 +58,8 @@ abstract class BTypesFromClassfile { * Construct the [[ClassBType]] for a parsed classfile. */ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { - cachedClassBType(classNode.name).getOrElse { - ClassBType(classNode.name, false) { res: ClassBType => - computeClassInfoFromClassNode(classNode, res) - } + ClassBType(classNode.name, fromSymbol = false) { res: ClassBType => + computeClassInfoFromClassNode(classNode, res) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index c919c81a346c..073da11cffce 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -93,19 +93,12 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { else if (classSym == NullClass) srNullRef else { val internalName = classSym.javaBinaryNameString - cachedClassBType(internalName) match { - case Some(bType) => - if (currentRun.compiles(classSym)) - assert(bType fromSymbol, s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") - bType - case None => - // The new ClassBType is added to the map via its apply, before we set its info. This - // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. - ClassBType(internalName, true) { res:ClassBType => - if (completeSilentlyAndCheckErroneous(classSym)) - Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) - else computeClassInfo(classSym, res) - } + // The new ClassBType is added to the map via its apply, before we set its info. This + // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. 
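// Editorial aside (hedged sketch, illustrative names only, not compiler code): the call
// sites above and below no longer wrap lookups in cachedClassBType(...).getOrElse because
// ClassBType.apply now owns the caching. The underlying idiom is the usual
// java.util.concurrent.ConcurrentHashMap one: get returns null on a miss (no Option
// allocation on the hot path) and putIfAbsent lets the first writer win under races.
import java.util.concurrent.ConcurrentHashMap

object ClassBTypeCacheSketch {
  final class Entry(val internalName: String) { var info: String = _ }

  private val cache = new ConcurrentHashMap[String, Entry]()

  def getOrCreate(internalName: String)(init: Entry => Unit): Entry = {
    val cached = cache.get(internalName)       // null on a miss, no Some allocation
    if (cached ne null) cached
    else {
      val fresh = new Entry(internalName)
      cache.putIfAbsent(internalName, fresh) match {
        case null => init(fresh); fresh        // we won the race: initialise and return ours
        case old  => old                       // another thread registered it first
      }
    }
  }
}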
+ ClassBType(internalName, fromSymbol = true) { res:ClassBType => + if (completeSilentlyAndCheckErroneous(classSym)) + Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) + else computeClassInfo(classSym, res) } } } @@ -623,33 +616,29 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = { assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING) - cachedClassBType(internalName).getOrElse { - ClassBType(internalName, true) { c: ClassBType => - val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) - val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) - Right(ClassInfo( - superClass = Some(ObjectRef), - interfaces = Nil, - flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, - nestedClasses = nested, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class - } + ClassBType(internalName, fromSymbol = true) { c: ClassBType => + val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) + val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) + Right(ClassInfo( + superClass = Some(ObjectRef), + interfaces = Nil, + flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, + nestedClasses = nested, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class } } def beanInfoClassClassBType(mainClass: Symbol): ClassBType = { val internalName = mainClass.javaBinaryNameString + "BeanInfo" - cachedClassBType(internalName).getOrElse { - ClassBType(internalName, true) { c: ClassBType => - Right(ClassInfo( - superClass = Some(sbScalaBeanInfoRef), - interfaces = Nil, - flags = javaFlags(mainClass), - nestedClasses = Lazy.eagerNil, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo)) - } + ClassBType(internalName, fromSymbol = true) { c: ClassBType => + Right(ClassInfo( + superClass = Some(sbScalaBeanInfoRef), + interfaces = Nil, + flags = javaFlags(mainClass), + nestedClasses = Lazy.eagerNil, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo)) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index c3b249ad2b93..95417af6a034 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -148,8 +148,8 @@ abstract class PostProcessor extends PerRunInit { */ override def getCommonSuperClass(inameA: String, inameB: String): String = { // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. 
- val a = cachedClassBType(inameA).get - val b = cachedClassBType(inameB).get + val a = cachedClassBType(inameA) + val b = cachedClassBType(inameB) val lub = a.jvmWiseLUB(b).get val lubName = lub.internalName assert(lubName != "scala/Any") diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index d0931071b3a1..af547e790271 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -2,31 +2,41 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory +import java.nio.file.{Files, Paths} import java.util.ServiceLoader import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger + import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} -import scala.tools.nsc.{Phase, Settings} +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.util.ChromeTrace +import scala.reflect.io.{AbstractFile, File} +import scala.tools.nsc.{Global, Phase, Settings} object Profiler { def apply(settings: Settings):Profiler = if (!settings.YprofileEnabled) NoOpProfiler else { - val reporter = if(settings.YprofileDestination.isSetByUser) - new StreamProfileReporter(new PrintWriter(new FileWriter(settings.YprofileDestination.value, true))) - else ConsoleProfileReporter + val reporter = settings.YprofileDestination.value match { + case _ if !settings.YprofileDestination.isSetByUser => NoOpProfileReporter + case "-" => ConsoleProfileReporter + case path => new StreamProfileReporter(new PrintWriter(new FileWriter(path, true))) + } new RealProfiler(reporter, settings) } - private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) +} +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long) { + val endNanos = System.nanoTime() } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, - idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, + allocatedBytes:Long, heapBytes:Long, totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long) = { copy(heapBytes = heapBytes) } @@ -61,13 +71,29 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB = toMegaBytes(end.heapBytes - start.heapBytes) } -sealed trait Profiler { +sealed abstract class Profiler { def finished(): Unit def beforePhase(phase: Phase): ProfileSnap def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + + def beforeUnit(phase: Phase, file: AbstractFile): Unit + + def afterUnit(phase: Phase, file: AbstractFile): Unit + + def beforeTypedImplDef(sym: Global#Symbol): Unit = () + def afterTypedImplDef(sym: Global#Symbol): Unit = () + + def beforeImplicitSearch(pt: Global#Type): Unit = () + def afterImplicitSearch(pt: Global#Type): Unit = () + + def beforeMacroExpansion(macroSym: Global#Symbol): Unit = () + def afterMacroExpansion(macroSym: Global#Symbol): Unit 
= () + + def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () + def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () } private [profile] object NoOpProfiler extends Profiler { @@ -75,6 +101,8 @@ private [profile] object NoOpProfiler extends Profiler { override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () + override def beforeUnit(phase: Phase, file: AbstractFile): Unit = () + override def afterUnit(phase: Phase, file: AbstractFile): Unit = () override def finished(): Unit = () } private [profile] object RealProfiler { @@ -87,17 +115,55 @@ private [profile] object RealProfiler { val threadMx = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() + lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator().asScala.toList + + private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { + val current = Thread.currentThread() + val allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId) + ProfileSnap( + threadId = current.getId, + threadName = current.getName, + snapTimeNanos = System.nanoTime(), + idleTimeNanos = idleTimeNanos, + cpuTimeNanos = threadMx.getCurrentThreadCpuTime, + userTimeNanos = threadMx.getCurrentThreadUserTime, + allocatedBytes = allocatedBytes, + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime + ) + } + private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed } private [profile] class RealProfiler(reporter : ProfileReporter, val settings: Settings) extends Profiler with NotificationListener { + private val mainThread = Thread.currentThread() + val id = RealProfiler.idGen.incrementAndGet() + object Category { + final val Run = "run" + final val Phase = "phase" + final val File = "file" + final val TypeCheck = "typecheck" + final val Implicit = "implicit" + final val Macro = "macro" + final val Completion = "completion" + } + + private val chromeTrace = { + if (settings.YprofileTrace.isSetByUser) + new ChromeTrace(Paths.get(settings.YprofileTrace.value)) + else null + } + if (chromeTrace != null) + chromeTrace.traceDurationEventStart(Category.Run, "scalac-" + id) + def completeBackground(threadRange: ProfileRange): Unit = { reporter.reportBackground(this, threadRange) } def outDir = settings.outputDirs.getSingleOutput.getOrElse(settings.outputDirs.outputs.head._2.file).toString - val id = RealProfiler.idGen.incrementAndGet() RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.addNotificationListener(this, null, null) case gc => println(s"Cant connect gcListener to ${gc.getClass}") @@ -105,25 +171,6 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val active = RealProfiler.allPlugins map (_.generate(this, settings)) - private val mainThread = Thread.currentThread() - - private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { - import RealProfiler._ - val current = Thread.currentThread() - - ProfileSnap( - threadId = current.getId, - threadName = current.getName, - snapTimeNanos = System.nanoTime(), - idleTimeNanos = idleTimeNanos, - cpuTimeNanos = threadMx.getCurrentThreadCpuTime, - userTimeNanos = threadMx.getCurrentThreadUserTime, - allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() - ) - } - private 
def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed - private def doGC: Unit = { System.gc() System.runFinalization() @@ -139,8 +186,19 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S case gc => } reporter.close(this) + if (chromeTrace != null) { + for (gcEvent <- gcEvents) { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, GcThreadId) + } + chromeTrace.traceDurationEventEnd(Category.Run, "scalac-" + id) + chromeTrace.close() + } } + private val gcEvents = ArrayBuffer[GcEventData]() + private val GcThreadId = "GC" override def handleNotification(notification: Notification, handback: scala.Any): Unit = { import java.lang.{Long => jLong} @@ -161,13 +219,30 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { + gcEvents += gcEvent + } + reporter.reportGc(gcEvent) + } + } + + override def beforePhase(phase: Phase): ProfileSnap = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Phase, phase.name) + if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) + doGC + if (settings.YprofileExternalTool.containsPhase(phase)) { + println("Profile hook start") + ExternalToolHook.before() } + active foreach {_.beforePhase(phase)} + RealProfiler.snapThread(0) } override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) active foreach {_.afterPhase(phase)} if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") @@ -175,24 +250,85 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S } val finalSnap = if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) { doGC - initialSnap.updateHeap(readHeapUsage()) + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Phase, phase.name) reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforeUnit(phase: Phase, file: AbstractFile): Unit = { assert(mainThread eq Thread.currentThread()) - if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) - doGC - if (settings.YprofileExternalTool.containsPhase(phase)) { - println("Profile hook start") - ExternalToolHook.before() + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.File, file.name) + } + + private var nextAfterUnitSnap: Long = System.nanoTime() + + override def afterUnit(phase: Phase, file: AbstractFile): Unit = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) { + val now = System.nanoTime() + chromeTrace.traceDurationEventEnd(Category.File, file.name) + if (now > nextAfterUnitSnap) { + val initialSnap = 
RealProfiler.snapThread(0) + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", initialSnap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", initialSnap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", initialSnap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", initialSnap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", initialSnap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", initialSnap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", initialSnap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } } - active foreach {_.beforePhase(phase)} - snapThread(0) } + override def beforeTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.TypeCheck, sym.rawname.toString) + } + override def afterTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.TypeCheck, sym.rawname.toString) + } + + override def beforeImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def afterImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def beforeMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def afterMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventStart(Category.Completion, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File, associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion, completionName(root, associatedFile)) + } + } + + override def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventEnd(Category.Completion, completionName(root, associatedFile)) + chromeTrace.traceDurationEventEnd(Category.File, associatedFile.name) + chromeTrace.traceDurationEventEnd(Category.Completion, "↯", colour = "thread_state_sleeping") + } + } + + private def completionName(root: Global#Symbol, associatedFile: AbstractFile): String = { + if (root.hasPackageFlag || root.isTopLevel) root.javaBinaryNameString + else { + val enclosing = root.enclosingTopLevelClass + enclosing.javaBinaryNameString + "::" + root.rawname.toString + } + } } object EventType extends Enumeration { @@ -216,24 +352,23 @@ sealed trait ProfileReporter { } object ConsoleProfileReporter extends ProfileReporter { + private val outWriter = new PrintWriter(Console.out) + private val delegate = new StreamProfileReporter(new PrintWriter(Console.out)) + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = 
delegate.reportBackground(profiler, threadRange) + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = delegate.reportForeground(profiler, threadRange) + override def close(profiler: RealProfiler): Unit = outWriter.flush() + + override def header(profiler: RealProfiler): Unit = delegate.header(profiler) + override def reportGc(data: GcEventData): Unit = delegate.reportGc(data) +} - - override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - +object NoOpProfileReporter extends ProfileReporter { + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () override def close(profiler: RealProfiler): Unit = () - override def header(profiler: RealProfiler): Unit = { - println(s"Profiler start (${profiler.id}) ${profiler.outDir}") - } - - override def reportGc(data: GcEventData): Unit = { - println(f"Profiler GC reported ${data.gcEndMillis - data.gcStartMillis}ms") - } + override def header(profiler: RealProfiler): Unit = () + override def reportGc(data: GcEventData): Unit = () } class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { @@ -259,10 +394,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } - diff --git a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 33d8cefde10b..f3204a0b8072 100644 --- a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -86,9 +86,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index bddef769be99..60650c48e0dc 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -11,7 +11,7 @@ package settings import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory } import scala.collection.generic.Clearable import scala.io.Source -import scala.reflect.internal.util.StringOps +import scala.reflect.internal.util.{ SomeOfNil, StringOps } import scala.reflect.{ ClassTag, classTag } /** A mutable Settings object. 
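// Editorial aside (hedged, stand-in definition only) on SomeOfNil, used in the hunks
// below: the patch replaces repeated `Some(Nil)` allocations with one shared constant
// from scala.reflect.internal.util, and drops the per-file `private final val SomeNil`
// in Contexts in its favour. An illustrative definition and use:
object SomeOfNilSketch {
  val SomeOfNil: Some[List[Nothing]] = Some(Nil)

  // A setting parse that succeeds with no residual arguments can hand back the shared
  // constant instead of building a fresh Some(Nil) on every call.
  def tryToSetBoolean(arg: String): Option[List[String]] =
    if (arg.equalsIgnoreCase("true") || arg.equalsIgnoreCase("false")) SomeOfNil
    else None
}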
@@ -127,7 +127,7 @@ class MutableSettings(val errorFn: String => Unit) // -Xfoo: clears Clearables def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match { - case Some(c: Clearable) => c.clear() ; Some(Nil) + case Some(c: Clearable) => c.clear() ; SomeOfNil case Some(s) => s.errorAndValue(s"Missing argument to $cmd", None) case None => None } @@ -463,10 +463,10 @@ class MutableSettings(val errorFn: String => Unit) case List(x) => if (x.equalsIgnoreCase("true")) { value = true - Some(Nil) + SomeOfNil } else if (x.equalsIgnoreCase("false")) { value = false - Some(Nil) + SomeOfNil } else errorAndValue(s"'$x' is not a valid choice for '$name'", None) case _ => errorAndValue(s"'$name' accepts only one boolean value", None) } @@ -867,8 +867,8 @@ class MutableSettings(val errorFn: String => Unit) override def tryToSetColon(args: List[String]) = args match { case Nil => errorAndValue(usageErrorMessage, None) - case List("help") => sawHelp = true; Some(Nil) - case List(x) if choices contains x => value = x ; Some(Nil) + case List("help") => sawHelp = true; SomeOfNil + case List(x) if choices contains x => value = x ; SomeOfNil case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None) case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None) } @@ -933,7 +933,7 @@ class MutableSettings(val errorFn: String => Unit) args match { case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(splitDefault) - case xs => value = (value ++ xs).distinct.sorted ; Some(Nil) + case xs => value = (value ++ xs).distinct.sorted ; SomeOfNil } } catch { case _: NumberFormatException => None } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a0fbedc03924..a79bc9085f16 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -406,7 +406,9 @@ trait ScalaSettings extends AbsScalaSettings override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination = StringSetting("-Yprofile-destination", "file", "where to send profiling output - specify a file, default is to the console.", ""). + val YprofileDestination = StringSetting("-Yprofile-destination", "file", "Profiling output - specify a file or `-` for console.", ""). + withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileTrace = StringSetting("-Yprofile-trace", "file", "Capture trace of compilation in Chrome Trace format", "profile.trace"). withPostSetHook( _ => YprofileEnabled.value = true ) val YprofileExternalTool = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase", "typer"). 
withPostSetHook( _ => YprofileEnabled.value = true ) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 85ea78c912a7..70ca742b7b4b 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -196,6 +196,7 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol): Unit def sourcefile: Option[AbstractFile] = None + def associatedFile(self: Symbol): AbstractFile = NoAbstractFile /** * Description of the resource (ClassPath, AbstractFile) @@ -214,23 +215,29 @@ abstract class SymbolLoaders { } override def complete(root: Symbol) { + val assocFile = associatedFile(root) + currentRunProfilerBeforeCompletion(root, assocFile) try { - val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - val currentphase = phase - doComplete(root) - phase = currentphase - informTime("loaded " + description, start) - ok = true - setSource(root) - setSource(root.companionSymbol) // module -> class, class -> module - } - catch { - case ex @ (_: IOException | _: MissingRequirementError) => - ok = false - signalError(root, ex) + try { + val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + val currentphase = phase + doComplete(root) + phase = currentphase + informTime("loaded " + description, start) + ok = true + setSource(root) + setSource(root.companionSymbol) // module -> class, class -> module + } + catch { + case ex@(_: IOException | _: MissingRequirementError) => + ok = false + signalError(root, ex) + } + initRoot(root) + if (!root.isPackageClass) initRoot(root.companionSymbol) + } finally { + currentRunProfilerAfterCompletion(root, assocFile) } - initRoot(root) - if (!root.isPackageClass) initRoot(root.companionSymbol) } override def load(root: Symbol) { complete(root) } @@ -329,18 +336,27 @@ abstract class SymbolLoaders { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile + override def associatedFile(self: Symbol): AbstractFile = classfile } class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter { protected def description = "source file "+ srcfile.toString override def fromSource = true override def sourcefile = Some(srcfile) + override def associatedFile(self: Symbol): AbstractFile = srcfile protected def doComplete(root: Symbol): Unit = compileLate(srcfile) } object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter { protected def description = "module class loader" protected def doComplete(root: Symbol) { root.sourceModule.initialize } + override def associatedFile(self: Symbol): AbstractFile = { + val sourceModule = self.sourceModule + sourceModule.rawInfo match { + case loader: SymbolLoader => loader.associatedFile(sourceModule) + case _ => super.associatedFile(self) + } + } } /** used from classfile parser to avoid cycles */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f99b85b7cfdd..49f88f8e739b 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1094,7 +1094,7 @@ abstract class ClassfileParser { def addParamNames(): Unit = if ((paramNames ne null) && sym.hasRawInfo && 
sym.isMethod) { val params = sym.rawInfo.params - (paramNames zip params).foreach { + foreach2(paramNames.toList, params) { case (nme.NO_NAME, _) => // param was ACC_SYNTHETIC; ignore case (name, param) => param.resetFlag(SYNTHETIC) @@ -1150,6 +1150,16 @@ abstract class ClassfileParser { mod.moduleClass setInfo loaders.moduleClassLoader cls.associatedFile = file mod.moduleClass.associatedFile = file + + /** + * need to set privateWithin here because the classfile of a nested protected class is public in bytecode, + * so propagatePackageBoundary will not set it when the symbols are completed + */ + if (jflags.isProtected) { + cls.privateWithin = cls.enclosingPackage + mod.privateWithin = cls.enclosingPackage + } + (cls, mod) } diff --git a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala index 851482af6e59..c135de373e02 100644 --- a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala +++ b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala @@ -278,7 +278,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { */ def expandLazyClassMember(lazyVar: global.Symbol, lazyAccessor: global.Symbol, transformedRhs: global.Tree): Tree = { val slowPathSym = slowPathFor(lazyAccessor) - val rhsAtSlowDef = transformedRhs.changeOwner(lazyAccessor -> slowPathSym) + val rhsAtSlowDef = transformedRhs.changeOwner(lazyAccessor, slowPathSym) val isUnit = isUnitGetter(lazyAccessor) val selectVar = if (isUnit) UNIT else Select(thisRef, lazyVar) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index b3e2e7ae6ba3..8cf0e4c7c2b6 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -250,7 +250,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme methodSym setInfoAndEnter MethodType(Nil, UnitTpe) // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago. - val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym) + val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol, methodSym) val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) } delayedDD.asInstanceOf[DefDef] @@ -549,7 +549,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // Move tree into constructor, take care of changing owner from `oldOwner` to `newOwner` (the primary constructor symbol) def apply(oldOwner: Symbol, newOwner: Symbol)(tree: Tree) = if (tree eq EmptyTree) tree - else transform(tree.changeOwner(oldOwner -> newOwner)) + else transform(tree.changeOwner(oldOwner, newOwner)) } // Assign `rhs` to class field / trait setter `assignSym` diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index b97e54f10f81..f21a28ccc72c 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -155,7 +155,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { // so must drop their variance. 
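// Editorial aside (hedged sketch): foreach2, used in genLoadArguments and addParamNames
// above, walks two lists in lockstep and applies f to each pair, so callers avoid
// materialising the intermediate list that `xs zip ys` would build. Illustrative
// definition only; the compiler uses its own helper from the internal collection
// utilities.
object Foreach2Sketch {
  def foreach2[A, B](xs: List[A], ys: List[B])(f: (A, B) => Unit): Unit = {
    var as = xs
    var bs = ys
    while (as.nonEmpty && bs.nonEmpty) {
      f(as.head, bs.head)
      as = as.tail
      bs = bs.tail
    }
  }

  // Example: pair up names and values without allocating a zipped list.
  def demo(): Unit =
    foreach2(List("a", "b"), List(1, 2)) { (name, value) => println(s"$name -> $value") }
}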
val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) + val thisParamType = appliedType(clazz, tparamsFromClass.map(_.tpeHK)) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) @@ -229,7 +229,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { .substituteSymbols(origTpeParams, extensionTpeParams) .substituteSymbols(origParams, extensionParams) .substituteThis(origThis, extensionThis) - .changeOwner(origMeth -> extensionMeth) + .changeOwner(origMeth, extensionMeth) new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree) } val castBody = diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 029b7b951b4d..cf5cf75ba01a 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -600,7 +600,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val computerSym = owner.newMethod(lazyName append nme.LAZY_SLOW_SUFFIX, pos, ARTIFACT | PRIVATE) setInfo MethodType(Nil, lazyValType) - val rhsAtComputer = rhs.changeOwner(lazySym -> computerSym) + val rhsAtComputer = rhs.changeOwner(lazySym, computerSym) val computer = mkAccessor(computerSym)(gen.mkSynchronized(Ident(holderSym))( If(initialized, getValue, @@ -690,7 +690,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree = - atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol -> newOwner))) + atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol, newOwner))) override def transform(stat: Tree): Tree = { val currOwner = currentOwner // often a class, but not necessarily diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8a466ca3305d..51bb8296c978 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -130,7 +130,7 @@ abstract class UnCurry extends InfoTransform /** The type of a non-local return expression with given argument type */ private def nonLocalReturnExceptionType(argtype: Type) = - appliedType(NonLocalReturnControlClass, argtype) + appliedType(NonLocalReturnControlClass, argtype :: Nil) /** A hashmap from method symbols to non-local return keys */ private val nonLocalReturnKeys = perRunCaches.newMap[Symbol, Symbol]() @@ -336,7 +336,7 @@ abstract class UnCurry extends InfoTransform case body => val thunkFun = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function] log(s"Change owner from $currentOwner to ${thunkFun.symbol} in ${thunkFun.body}") - thunkFun.body.changeOwner((currentOwner, thunkFun.symbol)) + thunkFun.body.changeOwner(currentOwner, thunkFun.symbol) transformFunction(thunkFun) } } @@ -400,7 +400,7 @@ abstract class UnCurry extends InfoTransform debuglog("lifting tree at: " + (tree.pos)) val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos) sym.setInfo(MethodType(List(), tree.tpe)) - tree.changeOwner(currentOwner -> sym) + tree.changeOwner(currentOwner, sym) 
localTyper.typedPos(tree.pos)(Block( List(DefDef(sym, ListOfNil, tree)), Apply(Ident(sym), Nil) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 27fdfe806b94..1c4e7caf1ff8 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -857,7 +857,7 @@ trait MatchAnalysis extends MatchApproximation { val argLen = (caseFieldAccs.length min ctorParams.length) val examples = (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse Some(WildcardExample)).toList - sequence(examples) + sequenceOpt(examples) } cls match { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index de41991c90ab..837f5158f971 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -89,8 +89,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable val reused = new mutable.HashMap[TreeMaker, ReusedCondTreeMaker] - var okToCall = false - val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)} + val reusedOrOrig = (tm: TreeMaker) => reused.getOrElse(tm, tm) // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker // once this has been computed, we'll know which tree makers are reused, @@ -128,7 +127,6 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above) } - okToCall = true // TODO: remove (debugging) // replace original treemakers that are reused (as determined when computing collapsed), // by ReusedCondTreeMakers @@ -416,7 +414,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // generate if-then-else for 1 case switch (avoids verify error... 
can't imagine a one-case switch being faster than if-then-else anyway) if (cases.isEmpty || cases.tail.isEmpty) Nil else { - val caseDefs = cases map { case (scrutSym, makers) => + val caseDefs = traverseOpt(cases) { case (scrutSym, makers) => makers match { // default case case GuardAndBodyTreeMakers(guard, body) => @@ -426,15 +424,15 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { Some(CaseDef(pattern, guard, body)) // alternatives case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported => - val switchableAlts = altss map { + // succeed iff they were all switchable + val switchableAlts = traverseOpt(altss) { case SwitchableTreeMaker(pattern) :: Nil => Some(pattern) case _ => None } - // succeed if they were all switchable - sequence(switchableAlts) map { switchableAlts => + switchableAlts map { switchableAlts => def extractConst(t: Tree) = t match { case Literal(const) => const case _ => t @@ -453,7 +451,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } } - val caseDefsWithGuards = sequence(caseDefs) match { + val caseDefsWithGuards = caseDefs match { case None => return Nil case Some(cds) => cds } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index e56110cb6bb2..02a28999690a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -10,6 +10,7 @@ package transform package patmat import scala.tools.nsc.typechecker.Contexts +import scala.reflect.internal.util /** An 'extractor' can be a case class or an unapply or unapplySeq method. * @@ -157,7 +158,7 @@ trait PatternExpansion { else None } - private def booleanUnapply = if (isBooleanUnapply) Some(Nil) else None + private def booleanUnapply = if (isBooleanUnapply) util.SomeOfNil else None // In terms of the (equivalent -- if we're dealing with an unapply) case class, what are the constructor's parameter types? 
private val equivConstrParamTypes = diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index ce9923ee7f05..3a4a1243d288 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -77,7 +77,7 @@ trait Checkable { def propagateKnownTypes(from: Type, to: Symbol): Type = { def tparams = to.typeParams val tvars = tparams map (p => TypeVar(p)) - val tvarType = appliedType(to, tvars: _*) + val tvarType = appliedType(to, tvars) val bases = from.baseClasses filter (to.baseClasses contains _) bases foreach { bc => @@ -104,7 +104,7 @@ trait Checkable { case (_, tvar) if tvar.instValid => tvar.constr.inst case (tparam, _) => tparam.tpeHK } - appliedType(to, resArgs: _*) + appliedType(to, resArgs) } private def isUnwarnableTypeArgSymbol(sym: Symbol) = ( diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5eae827baa2b..6c174c5c7338 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -8,7 +8,7 @@ package typechecker import scala.collection.{ immutable, mutable } import scala.annotation.tailrec -import scala.reflect.internal.util.shortClassOfInstance +import scala.reflect.internal.util.{ shortClassOfInstance, SomeOfNil } import scala.tools.nsc.reporters.Reporter /** @@ -938,7 +938,7 @@ trait Contexts { self: Analyzer => // the corresponding package object may contain implicit members. val pre = owner.packageObject.typeOfThis Some(collectImplicits(pre.implicitMembers, pre)) - } else SomeNil + } else SomeOfNil } // @@ -1567,7 +1567,6 @@ trait Contexts { self: Analyzer => private def imp1Explicit = imp1 isExplicitImport name private def imp2Explicit = imp2 isExplicitImport name } - private final val SomeNil = Some(Nil) } object ContextMode { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 96c067c38b7f..583bee6ea3a0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -80,6 +80,15 @@ trait Implicits { * @return A search result */ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { + currentRun.profiler.beforeImplicitSearch(pt) + try { + inferImplicit1(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, pos) + } finally { + currentRun.profiler.afterImplicitSearch(pt) + } + } + + private def inferImplicit1(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. 
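// Editorial aside (hedged, illustrative trait only, not the compiler's Profiler API):
// the instrumentation shape used in Implicits above and in Macros and Typers below.
// The public entry point wraps the real work in try/finally with paired profiler
// callbacks, so the "after" event fires even if the wrapped code throws and the
// start/end trace events stay balanced.
trait ProfiledSearchSketch {
  def beforeSearch(key: String): Unit
  def afterSearch(key: String): Unit

  final def withSearch[A](key: String)(body: => A): A = {
    beforeSearch(key)
    try body
    finally afterSearch(key)   // runs on normal return and on exception alike
  }
}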
val shouldPrint = printTypings && !context.undetparams.isEmpty @@ -1329,7 +1338,7 @@ trait Implicits { /* Re-wraps a type in a manifest before calling inferImplicit on the result */ def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) = - inferImplicitFor(appliedType(manifestClass, tp), tree, context).tree + inferImplicitFor(appliedType(manifestClass, tp :: Nil), tree, context).tree def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass) def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4cb9c2ca39d7..3a3a90ae3823 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -754,7 +754,15 @@ trait Macros extends MacroRuntimes with Traces with Helpers { /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`. * @see DefMacroExpander */ - def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt) + def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = { + val macroSym = expandee.symbol + currentRun.profiler.beforeMacroExpansion(macroSym) + try { + pluginsMacroExpand(typer, expandee, mode, pt) + } finally { + currentRun.profiler.afterMacroExpansion(macroSym) + } + } /** Default implementation of `macroExpand`. * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 0dbb0e860b25..7fc64af4a27b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -181,7 +181,7 @@ trait NamesDefaults { self: Analyzer => blockTyper.context.scope enter sym val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType) // it stays in Vegas: scala/bug#5720, scala/bug#5727 - qual changeOwner (blockTyper.context.owner -> sym) + qual changeOwner (blockTyper.context.owner, sym) val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name))) val baseFunTransformed = atPos(baseFun.pos.makeTransparent) { diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 314b856dab28..3c4e88334a11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -123,10 +123,10 @@ trait TypeDiagnostics { */ final def exampleTuplePattern(names: List[Name]): String = { val arity = names.length - val varPatternNames: Option[List[String]] = sequence(names map { + val varPatternNames: Option[List[String]] = traverseOpt(names) { case name if nme.isVariableName(name) => Some(name.decode) case _ => None - }) + } def parenthesize(a: String) = s"($a)" def genericParams = (Seq("param1") ++ (if (arity > 2) Seq("...") else Nil) ++ Seq(s"param$arity")) parenthesize(varPatternNames.getOrElse(genericParams).mkString(", ")) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2f828154c5d0..2679ce6394f9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1814,38 +1814,43 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedClassDef(cdef: ClassDef): Tree = { val clazz = cdef.symbol - val typedMods = typedModifiers(cdef.mods) - assert(clazz != NoSymbol, cdef) - reenterTypeParams(cdef.tparams) - val tparams1 = cdef.tparams mapConserve (typedTypeDef) - val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) - val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) - checkEphemeral(clazz, impl2.body) - - if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { - if (!clazz.owner.isPackageClass) - context.error(clazz.pos, "inner classes cannot be classfile annotations") - // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. - // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement - // of constant argument values "for free". Related to scala/bug#7041. - else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, - """|subclassing Classfile does not - |make your annotation visible at runtime. If that is what - |you want, you must write the annotation class in Java.""".stripMargin) - } - - warnTypeParameterShadow(tparams1, clazz) - - if (!isPastTyper) { - for (ann <- clazz.getAnnotation(DeprecatedAttr)) { - val m = companionSymbolOf(clazz, context) - if (m != NoSymbol) - m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) - } - } - treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) - .setType(NoType) + currentRun.profiler.beforeTypedImplDef(clazz) + try { + val typedMods = typedModifiers(cdef.mods) + assert(clazz != NoSymbol, cdef) + reenterTypeParams(cdef.tparams) + val tparams1 = cdef.tparams mapConserve (typedTypeDef) + val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) + val impl2 = finishMethodSynthesis(impl1, clazz, context) + if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) + checkEphemeral(clazz, impl2.body) + + if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { + if (!clazz.owner.isPackageClass) + context.error(clazz.pos, "inner classes cannot be classfile annotations") + // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. + // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement + // of constant argument values "for free". Related to scala/bug#7041. + else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, + """|subclassing Classfile does not + |make your annotation visible at runtime. 
If that is what + |you want, you must write the annotation class in Java.""".stripMargin) + } + + warnTypeParameterShadow(tparams1, clazz) + + if (!isPastTyper) { + for (ann <- clazz.getAnnotation(DeprecatedAttr)) { + val m = companionSymbolOf(clazz, context) + if (m != NoSymbol) + m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) + } + } + treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) + .setType(NoType) + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } def typedModuleDef(mdef: ModuleDef): Tree = { @@ -1855,31 +1860,37 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (linkedClass != NoSymbol) linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize) - val clazz = mdef.symbol.moduleClass - val typedMods = typedModifiers(mdef.mods) - assert(clazz != NoSymbol, mdef) - val noSerializable = ( - (linkedClass eq NoSymbol) - || linkedClass.isErroneous - || !linkedClass.isSerializable - || clazz.isSerializable - ) - val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { - typedParentTypes(mdef.impl) ++ ( - if (noSerializable) Nil - else { - clazz.makeSerializable() - TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil - } - ) - }) + val clazz = mdef.symbol.moduleClass + currentRun.profiler.beforeTypedImplDef(clazz) + try { - val impl2 = finishMethodSynthesis(impl1, clazz, context) + val typedMods = typedModifiers(mdef.mods) + assert(clazz != NoSymbol, mdef) + val noSerializable = ( + (linkedClass eq NoSymbol) + || linkedClass.isErroneous + || !linkedClass.isSerializable + || clazz.isSerializable + ) + val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { + typedParentTypes(mdef.impl) ++ ( + if (noSerializable) Nil + else { + clazz.makeSerializable() + TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil + } + ) + }) - if (settings.isScala211 && mdef.symbol == PredefModule) - ensurePredefParentsAreInSameSourceFile(impl2) + val impl2 = finishMethodSynthesis(impl1, clazz, context) - treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + if (settings.isScala211 && mdef.symbol == PredefModule) + ensurePredefParentsAreInSameSourceFile(impl2) + + treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } private def ensurePredefParentsAreInSameSourceFile(template: Template) = { @@ -2027,13 +2038,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedValDef(vdef: ValDef): ValDef = { val sym = vdef.symbol - val valDefTyper = { - val maybeConstrCtx = - if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext - else context - newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + currentRun.profiler.beforeTypedImplDef(sym) + try { + val valDefTyper = { + val maybeConstrCtx = + if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext + else context + newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + } + valDefTyper.typedValDefImpl(vdef) + } finally { + currentRun.profiler.afterTypedImplDef(sym) } - valDefTyper.typedValDefImpl(vdef) } // use typedValDef instead. 
this version is called after creating a new context for the ValDef @@ -2252,89 +2268,92 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedDefDef(ddef: DefDef): DefDef = { - // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`) - // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt) val meth = ddef.symbol.initialize + currentRun.profiler.beforeTypedImplDef(meth) + try { - reenterTypeParams(ddef.tparams) - reenterValueParams(ddef.vparamss) + reenterTypeParams(ddef.tparams) + reenterValueParams(ddef.vparamss) - // for `val` and `var` parameter, look at `target` meta-annotation - if (!isPastTyper && meth.isPrimaryConstructor) { - for (vparams <- ddef.vparamss; vd <- vparams) { - if (vd.mods.isParamAccessor) { - vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + // for `val` and `var` parameter, look at `target` meta-annotation + if (!isPastTyper && meth.isPrimaryConstructor) { + for (vparams <- ddef.vparamss; vd <- vparams) { + if (vd.mods.isParamAccessor) { + vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + } } } - } - val tparams1 = ddef.tparams mapConserve typedTypeDef - val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) + val tparams1 = ddef.tparams mapConserve typedTypeDef + val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) - warnTypeParameterShadow(tparams1, meth) + warnTypeParameterShadow(tparams1, meth) - meth.annotations.map(_.completeInfo()) + meth.annotations.map(_.completeInfo()) - for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) - if (isRepeatedParamType(vparam1.symbol.tpe)) - StarParamNotLastError(vparam1) + for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) + if (isRepeatedParamType(vparam1.symbol.tpe)) + StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) - checkNonCyclic(ddef, tpt1) - ddef.tpt.setType(tpt1.tpe) - val typedMods = typedModifiers(ddef.mods) - var rhs1 = - if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors - if (!meth.isPrimaryConstructor && + val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) + checkNonCyclic(ddef, tpt1) + ddef.tpt.setType(tpt1.tpe) + val typedMods = typedModifiers(ddef.mods) + var rhs1 = + if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors + if (!meth.isPrimaryConstructor && (!meth.owner.isClass || - meth.owner.isModuleClass || - meth.owner.isAnonOrRefinementClass)) - InvalidConstructorDefError(ddef) - typed(ddef.rhs) - } else if (meth.isMacro) { - // typechecking macro bodies is sort of unconventional - // that's why we employ our custom typing scheme orchestrated outside of the typer - transformedOr(ddef.rhs, typedMacroBody(this, ddef)) - } else { - transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) - } + meth.owner.isModuleClass || + meth.owner.isAnonOrRefinementClass)) + InvalidConstructorDefError(ddef) + typed(ddef.rhs) + } else if (meth.isMacro) { + // typechecking macro bodies is sort of unconventional + // that's why we employ our custom typing scheme orchestrated outside of the typer + transformedOr(ddef.rhs, typedMacroBody(this, ddef)) + } else { + 
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) + } - if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { - // There are no supercalls for AnyVal or constructors from Java sources, which - // would blow up in computeParamAliases; there's nothing to be computed for them - // anyway. - if (meth.isPrimaryConstructor) - computeParamAliases(meth.owner, vparamss1, rhs1) - else - checkSelfConstructorArgs(ddef, meth.owner) - } + if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { + // There are no supercalls for AnyVal or constructors from Java sources, which + // would blow up in computeParamAliases; there's nothing to be computed for them + // anyway. + if (meth.isPrimaryConstructor) + computeParamAliases(meth.owner, vparamss1, rhs1) + else + checkSelfConstructorArgs(ddef, meth.owner) + } - if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) - rhs1 = checkDead(context, rhs1) + if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) + rhs1 = checkDead(context, rhs1) - if (!isPastTyper && meth.owner.isClass && + if (!isPastTyper && meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) - StarWithDefaultError(meth) - - if (!isPastTyper) { - val allParams = meth.paramss.flatten - for (p <- allParams) { - for (n <- p.deprecatedParamName) { - if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) - DeprecatedParamNameError(p, n) + StarWithDefaultError(meth) + + if (!isPastTyper) { + val allParams = meth.paramss.flatten + for (p <- allParams) { + for (n <- p.deprecatedParamName) { + if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) + DeprecatedParamNameError(p, n) + } } - } - if (meth.isStructuralRefinementMember) - checkMethodStructuralCompatible(ddef) + if (meth.isStructuralRefinementMember) + checkMethodStructuralCompatible(ddef) - if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { - case List(param) :: _ if !param.isImplicit => - checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) - case _ => + if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { + case List(param) :: _ if !param.isImplicit => + checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) + case _ => + } } - } - treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(meth) + } } def typedTypeDef(tdef: TypeDef): TypeDef = @@ -3796,9 +3815,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tryConst(tree, pt) } def trees2ConstArg(trees: List[Tree], pt: Type): Option[ArrayAnnotArg] = { - val args = trees.map(tree2ConstArg(_, pt)) - if (args.exists(_.isEmpty)) None - else Some(ArrayAnnotArg(args.flatten.toArray)) + traverseOpt(trees)(tree2ConstArg(_, pt)) + .map(args => ArrayAnnotArg(args.toArray)) } // begin typedAnnotation diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 61166f4239b8..ed6d4e6625a1 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ 
b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -234,7 +234,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => case _ => NoSymbol } trace("wrapping ")(defOwner(expr) -> meth) - val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth)) + val methdef = DefDef(meth, expr changeOwner (defOwner(expr), meth)) val moduledef = ModuleDef( obj, diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 411d6e01382f..35fb8e69fa28 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -36,7 +36,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => // monomorphic one by introducing existentials, see scala/bug#7009 for details existentialAbstraction(throwableSym.typeParams, throwableSym.tpe) } - this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil) + this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe :: Nil), List(Literal(Constant(throwableTpe))), Nil) } /** Tests for, get, or remove an annotation */ diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala index ef9646b80fa2..d59ba0f0c2de 100644 --- a/src/reflect/scala/reflect/internal/CapturedVariables.scala +++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala @@ -30,7 +30,7 @@ trait CapturedVariables { self: SymbolTable => def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) = if (isPrimitiveValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe else if (erasedTypes) objectRefClass.tpe - else appliedType(objectRefClass, tpe1) + else appliedType(objectRefClass, tpe1 :: Nil) if (vble.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass) else refType(refClass, ObjectRefClass) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index bf490bb5e2cd..cf3b33a6eafe 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -589,10 +589,10 @@ trait Definitions extends api.StandardDefinitions { private val symSet = new SymbolSet(seq.toList) def contains(sym: Symbol): Boolean = symSet.contains(sym) def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol - def specificType(args: List[Type], others: Type*): Type = { + def specificType(args: List[Type], others: List[Type] = Nil): Type = { val arity = args.length if (!isDefinedAt(arity)) NoType - else appliedType(apply(arity), args ++ others: _*) + else appliedType(apply(arity), args ::: others) } } // would be created synthetically for the default args. We call all objects in this method from the generated code @@ -610,8 +610,8 @@ trait Definitions extends api.StandardDefinitions { /** Creators for TupleN, ProductN, FunctionN. 
*/ def tupleType(elems: List[Type]) = TupleClass.specificType(elems) - def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe) - def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe) + def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe :: Nil) + def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe :: Nil) def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match { case ByteClass => nme.wrapByteArray @@ -912,13 +912,13 @@ trait Definitions extends api.StandardDefinitions { } else NoSymbol } - def arrayType(arg: Type) = appliedType(ArrayClass, arg) - def byNameType(arg: Type) = appliedType(ByNameParamClass, arg) - def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp) - def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg) - def optionType(tp: Type) = appliedType(OptionClass, tp) - def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) - def seqType(arg: Type) = appliedType(SeqClass, arg) + def arrayType(arg: Type) = appliedType(ArrayClass, arg :: Nil) + def byNameType(arg: Type) = appliedType(ByNameParamClass, arg :: Nil) + def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp :: Nil) + def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg :: Nil) + def optionType(tp: Type) = appliedType(OptionClass, tp :: Nil) + def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg :: Nil) + def seqType(arg: Type) = appliedType(SeqClass, arg :: Nil) // For name-based pattern matching, derive the "element type" (type argument of Option/Seq) // from the relevant part of the signature of various members (get/head/apply/drop) @@ -955,7 +955,9 @@ trait Definitions extends api.StandardDefinitions { } } - def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg) + def ClassType(arg: Type) = + if (phase.erasedTypes) ClassClass.tpe + else appliedType(ClassClass, arg :: Nil) /** Can we tell by inspecting the symbol that it will never * at any phase have type parameters? @@ -1348,7 +1350,7 @@ trait Definitions extends api.StandardDefinitions { newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head))) } def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = { - newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) + newPolyMethod(1, owner, name, flags)(tparams => (util.SomeOfNil, createFn(tparams.head))) } /** Is symbol a phantom class for which no runtime representation exists? 
*/ diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 28b01eb59906..f6c9a7ab04e8 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -433,7 +433,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case Literal(Constant(())) => - Some(Nil) + SomeOfNil case Apply(MaybeTypeTreeOriginal(SyntacticTypeApplied(MaybeSelectApply(TupleCompanionRef(sym)), targs)), args) if sym == TupleClass(args.length).companionModule && (targs.isEmpty || targs.length == args.length) => @@ -453,7 +453,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case MaybeTypeTreeOriginal(UnitClassRef(_)) => - Some(Nil) + SomeOfNil case MaybeTypeTreeOriginal(AppliedTypeTree(TupleClassRef(sym), args)) if sym == TupleClass(args.length) => Some(args) @@ -507,7 +507,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case bl @ self.Block(stats, SyntheticUnit()) => Some(treeInfo.untypecheckedBlockBody(bl)) case bl @ self.Block(stats, expr) => Some(treeInfo.untypecheckedBlockBody(bl) :+ expr) - case SyntheticUnit() => Some(Nil) + case SyntheticUnit() => SomeOfNil case _ if tree.isTerm && tree.nonEmpty => Some(tree :: Nil) case _ => None } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 93ff7dcf7d24..7c64405bedc4 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -14,6 +14,7 @@ import java.util.concurrent.TimeUnit import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.{TreeGen => InternalTreeGen} +import scala.reflect.io.AbstractFile abstract class SymbolTable extends macros.Universe with Collections @@ -486,6 +487,9 @@ abstract class SymbolTable extends macros.Universe * Adds the `sm` String interpolator to a [[scala.StringContext]]. 
   */
   implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
+
+  protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = ()
+  protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = ()
 }
 
 trait SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index f94e16a0afb7..4929ca23d75a 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -216,6 +216,9 @@ trait Trees extends api.Trees {
       }
     }
 
+    def changeOwner(from: Symbol, to: Symbol): Tree =
+      new ChangeOwnerTraverser(from, to) apply this
+
     def shallowDuplicate: Tree = new ShallowDuplicator(this) transform this
     def shortClass: String = (getClass.getName split "[.$]").last
 
diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala
new file mode 100644
index 000000000000..5b34def2d6ac
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala
@@ -0,0 +1,177 @@
+package scala.reflect.internal.util
+
+import java.io.Closeable
+import java.lang.management.ManagementFactory
+import java.nio.file.{Files, Path}
+import java.util
+import java.util.concurrent.TimeUnit
+
+import scala.collection.mutable
+
+object ChromeTrace {
+
+  private object EventType {
+    final val Start = "B"
+    final val Instant = "I"
+    final val End = "E"
+    final val Complete = "X"
+
+    final val Counter = "C"
+
+    final val AsyncStart = "b"
+    final val AsyncInstant = "n"
+    final val AsyncEnd = "e"
+  }
+
+}
+
+/** Allows writing a subset of https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#
+  * for use in Chrome's about://tracing or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */
+final class ChromeTrace(f: Path) extends Closeable {
+  import ChromeTrace.EventType
+  private val traceWriter = FileUtils.newAsyncBufferedWriter(f)
+  private val context = mutable.ArrayStack[JsonContext](TopContext)
+  private val tidCache = new ThreadLocal[String]() {
+    override def initialValue(): String = Thread.currentThread().getId.formatted("%05d")
+  }
+  objStart()
+  fld("traceEvents")
+  context.push(ValueContext)
+  arrStart()
+  traceWriter.newLine()
+
+  private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "")
+
+  override def close(): Unit = {
+    arrEnd()
+    objEnd()
+    context.pop()
+    tidCache.remove()
+    traceWriter.close()
+  }
+
+  def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = {
+    val durationMicros = nanosToMicros(durationNanos)
+    val startMicros = nanosToMicros(startNanos)
+    objStart()
+    str("cat", "scalac")
+    str("name", name)
+    str("ph", EventType.Complete)
+    str("tid", tid)
+    writePid(pidSuffix)
+    lng("ts", startMicros)
+    lng("dur", durationMicros)
+    objEnd()
+    traceWriter.newLine()
+  }
+
+  private def writePid(pidSuffix: String) = {
+    if (pidSuffix == "")
+      str("pid", pid)
+    else
+      str2("pid", pid, "-", pidSuffix)
+  }
+
+  def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = {
+    objStart()
+    str("cat", "scalac")
+    str("name", name)
+    str("ph", EventType.Counter)
+    str("tid", tid())
+    writePid(pidSuffix = if 
(processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + sealed abstract class JsonContext + case class ArrayContext(var first: Boolean) extends JsonContext + case class ObjectContext(var first: Boolean) extends JsonContext + case object ValueContext extends JsonContext + case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 970a5d300f8f..11d10128d1fc 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -288,10 +288,23 @@ trait Collections { true } - final def sequence[A](as: List[Option[A]]): Option[List[A]] = { - if (as.exists (_.isEmpty)) None - else Some(as.flatten) - } + // "Opt" suffix or traverse clashes with the various traversers' traverses + final def sequenceOpt[A](as: List[Option[A]]): 
Option[List[A]] = traverseOpt(as)(identity) + final def traverseOpt[A, B](as: List[A])(f: A => Option[B]): Option[List[B]] = + if (as eq Nil) SomeOfNil else { + var result: ListBuffer[B] = null + var curr = as + while (curr ne Nil) { + f(curr.head) match { + case Some(b) => + if (result eq null) result = ListBuffer.empty + result += b + case None => return None + } + curr = curr.tail + } + Some(result.toList) + } final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try { Some(ass.transpose) diff --git a/src/reflect/scala/reflect/internal/util/FileUtils.scala b/src/reflect/scala/reflect/internal/util/FileUtils.scala new file mode 100644 index 000000000000..a99a713d40f0 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/FileUtils.scala @@ -0,0 +1,190 @@ +package scala.reflect.internal.util + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + +import sun.security.action.GetPropertyAction + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + private val lineSeparator = java.security.AccessController.doPrivileged(new GetPropertyAction("line.separator")) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + private val Close = CharBuffer.allocate(0) + private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: 
CharBuffer = {
+      val reused = background.reuseBuffer
+      if (reused eq null) CharBuffer.allocate(bufferSize)
+      else {
+        // we don't care about race conditions
+        background.reuseBuffer = null
+        reused.clear()
+        reused
+      }
+    }
+
+    override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = {
+      var offset = initialOffset
+      var length = initialLength
+      while (length > 0) {
+        val capacity = current.remaining()
+        if (length <= capacity) {
+          current.put(cbuf, offset, length)
+          length = 0
+        } else {
+          current.put(cbuf, offset, capacity)
+          flushAsync()
+          length -= capacity
+          offset += capacity
+        }
+      }
+    }
+
+    override def write(s: String, initialOffset: Int, initialLength: Int): Unit = {
+      var offset = initialOffset
+      var length = initialLength
+      while (length > 0) {
+        val capacity = current.remaining()
+        if (length <= capacity) {
+          current.put(s, offset, offset + length)
+          length = 0
+        } else {
+          current.put(s, offset, offset + capacity)
+          flushAsync()
+          length -= capacity
+          offset += capacity
+        }
+      }
+    }
+
+    def newLine(): Unit = write(lineSeparator)
+
+    /** slightly breaks the flush contract in that the flush is not complete when the method returns */
+    override def flush(): Unit = {
+      flushAsync()
+    }
+
+    override def close(): Unit = {
+      background.ensureProcessed(current)
+      background.ensureProcessed(AsyncBufferedWriter.Close)
+      current = null
+      Await.result(background.asyncStatus.future, Duration.Inf)
+      underlying.close()
+    }
+    private object background extends Runnable {
+
+      import scala.concurrent.ExecutionContext.Implicits.global
+
+      private val pending = new LinkedBlockingQueue[CharBuffer]
+      // a failure detected will cause a Failure; a Success indicates a close
+      val asyncStatus = Promise[Unit]()
+      private val scheduled = new AtomicBoolean
+      @volatile var reuseBuffer: CharBuffer = _
+
+      def ensureProcessed(buffer: CharBuffer): Unit = {
+        if (asyncStatus.isCompleted) {
+          asyncStatus.future.value.get match {
+            case Success(()) => throw new IllegalStateException("closed")
+            case Failure(t) => throw new IOException("async failure", t)
+          }
+        }
+
+        // order is essential - add to the queue before the CAS
+        pending.add(buffer)
+        if (scheduled.compareAndSet(false, true)) {
+          global.execute(background)
+        }
+      }
+
+      def run(): Unit = {
+        try {
+          while (!pending.isEmpty) {
+            val next = pending.poll()
+            if (next eq AsyncBufferedWriter.Flush) {
+              underlying.flush()
+            } else if (next eq AsyncBufferedWriter.Close) {
+              underlying.flush()
+              underlying.close()
+              asyncStatus.trySuccess(())
+            } else {
+              val array = next.array()
+              next.flip()
+              underlying.write(array, next.arrayOffset() + next.position(), next.limit())
+              reuseBuffer = next
+            }
+          }
+        } catch {
+          case t: Throwable =>
+            asyncStatus.tryFailure(t)
+            throw t
+        }
+        finally scheduled.set(false)
+
+        // we are not scheduled any more
+        // as a last check ensure that we didn't race with an addition to the queue
+        // order is essential - queue is checked before CAS
+        if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) {
+          global.execute(background)
+        }
+      }
+    }
+  }
+}
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 9b5fd3798d70..cbffe587f6b8 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -8,6 +8,7 @@ package object util {
   // An allocation-avoiding reusable instance of the so-common List(Nil).
val ListOfNil: List[List[Nothing]] = Nil :: Nil + val SomeOfNil: Option[List[Nothing]] = Some(Nil) def andFalse(body: Unit): Boolean = false diff --git a/test/files/pos/t10786/Bar_2.scala b/test/files/pos/t10786/Bar_2.scala new file mode 100644 index 000000000000..37f0809ebf07 --- /dev/null +++ b/test/files/pos/t10786/Bar_2.scala @@ -0,0 +1,27 @@ +package pkg { + class C { + class T1 extends Foo_1.StaticClass + class T2 extends Foo_1.ProtectedStaticClass + def test(): Unit = { + val n1 = new Foo_1.StaticClass + n1.x + Foo_1.StaticClass.y + val n2 = new Foo_1.ProtectedStaticClass + n2.x + Foo_1.ProtectedStaticClass.y + } + + class I extends Foo_1 { + class T1 extends Foo_1.StaticClass + class T2 extends Foo_1.ProtectedStaticClass + def test(): Unit = { + val n1 = new Foo_1.StaticClass + n1.x + Foo_1.StaticClass.y + val n2 = new Foo_1.ProtectedStaticClass + n2.x + Foo_1.ProtectedStaticClass.y + } + } + } +} diff --git a/test/files/pos/t10786/Foo_1.java b/test/files/pos/t10786/Foo_1.java new file mode 100644 index 000000000000..bdda66de5b06 --- /dev/null +++ b/test/files/pos/t10786/Foo_1.java @@ -0,0 +1,12 @@ +package pkg; + +public class Foo_1 { + static class StaticClass { + public int x = 1; + public static int y = 1; + } + protected static class ProtectedStaticClass { + public int x = 1; + public static int y = 1; + } +} diff --git a/test/junit/scala/reflect/internal/util/FileUtilsTest.scala b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala new file mode 100644 index 000000000000..21eba42985b9 --- /dev/null +++ b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala @@ -0,0 +1,89 @@ +package scala.reflect.internal.util + +import java.io._ + +import org.junit.Assert._ +import org.junit._ + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1") + val fileExpected = File.createTempFile("FileUtilsTest", "t2") + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", true) + writeBoth(s"line $i chars", false) + sTest.newLine + sExpected.newLine + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Test def showPerformance: Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1") + val fileExpected = File.createTempFile("FileUtilsTest", "t2") + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine + } + val t4 = System.nanoTime() + sExpected.close() + + 
println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 4af8b317a833..da7dcc68131b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -9,6 +9,7 @@ import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ import scala.collection.immutable.IntMap +import scala.reflect.internal.util.JavaClearable import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.StoreReporter @@ -24,7 +25,7 @@ class CallGraphTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites)) @@ -142,7 +143,7 @@ class CallGraphTest extends BytecodeTesting { val m = getAsmMethod(c, "m") val List(fn) = callsInMethod(m) val forNameMeth = byteCodeRepository.methodNode("java/lang/Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;").get._1 - val classTp = cachedClassBType("java/lang/Class").get + val classTp = cachedClassBType("java/lang/Class") val r = callGraph.callsites(m)(fn) checkCallsite(fn, m, forNameMeth, classTp, safeToInline = false, atInline = false, atNoInline = false) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index 1f1eace35073..ab750855aeff 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ +import scala.reflect.internal.util.JavaClearable import scala.tools.nsc.backend.jvm.BTypes.MethodInlineInfo import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.testing.BytecodeTesting @@ -20,7 +21,7 @@ class InlineInfoTest extends BytecodeTesting { override def compilerArgs = "-opt:l:inline -opt-inline-from:**" compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses)) @@ -45,7 +46,7 @@ class InlineInfoTest extends BytecodeTesting { """.stripMargin val classes = compileClasses(code) - val fromSyms = classes.map(c => global.genBCode.bTypes.cachedClassBType(c.name).get.info.get.inlineInfo) + val fromSyms = classes.map(c => global.genBCode.bTypes.cachedClassBType(c.name).info.get.inlineInfo) val fromAttrs = classes.map(c => { assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs) @@ -64,7 +65,7 @@ class InlineInfoTest extends BytecodeTesting { |} """.stripMargin compileClasses("class C { new A }", javaCode = List((jCode, "A.java"))) - val info = global.genBCode.bTypes.cachedClassBType("A").get.info.get.inlineInfo + val info = global.genBCode.bTypes.cachedClassBType("A").info.get.inlineInfo assertEquals(info.methodInfos, Map( "bar()I" -> MethodInlineInfo(true,false,false), "()V" -> MethodInlineInfo(false,false,false), @@ -85,7 +86,7 @@ class InlineInfoTest extends BytecodeTesting { compileClasses("class C { def 
t: java.nio.file.WatchEvent.Kind[String] = null }", javaCode = List((jCode, "WatchEvent.java"))) // before the fix of scala-dev#402, the companion of the nested class `Kind` (containing the static method) was taken from // the classpath (classfile WatchEvent$Kind.class) instead of the actual companion from the source, so the static method was missing. - val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").get.info.get.inlineInfo + val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").info.get.inlineInfo assertEquals(info.methodInfos, Map( "HAI()Ljava/lang/String;" -> MethodInlineInfo(true,false,false), "()V" -> MethodInlineInfo(false,false,false))) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 61fecada673e..0d4408998989 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ +import scala.reflect.internal.util.JavaClearable import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BackendReporting._ @@ -26,7 +27,7 @@ class InlinerTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites))
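// Illustrative sketch only (not part of the patch above): one way the ChromeTrace
// writer introduced in this diff could be driven. The object name, output path,
// event names and Thread.sleep stand-in below are invented for the example; only
// the ChromeTrace / FileUtils API shown in the diff is assumed.
import java.nio.file.Paths

import scala.reflect.internal.util.ChromeTrace

object ChromeTraceExample {
  def main(args: Array[String]): Unit = {
    val trace = new ChromeTrace(Paths.get("example.trace")) // hypothetical output file
    try {
      val start = System.nanoTime()
      Thread.sleep(5) // stand-in for real work, e.g. typechecking a unit
      // "Complete" event: name plus start timestamp and duration, both in nanoseconds
      trace.traceDurationEvent("typer", start, System.nanoTime() - start)
      // Counter event: rendered as a separate counter track by the trace viewer
      val usedHeap = Runtime.getRuntime.totalMemory - Runtime.getRuntime.freeMemory
      trace.traceCounterEvent("heap", "usedBytes", usedHeap, processWide = true)
    } finally trace.close() // emits the closing JSON brackets so the file parses
  }
}
// The resulting file loads in Chrome's trace viewer, which is the format the
// ChromeTrace doc comment in this diff targets.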