Skip to content

Make source a field in Positioned nodes #9900

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 10 commits into from
Oct 3, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/Driver.scala
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class Driver {
val summary = CompilerCommand.distill(args)(using ictx)
ictx.setSettings(summary.sstate)
MacroClassLoader.init(ictx)
Positioned.updateDebugPos(using ictx)
Positioned.init(using ictx)

inContext(ictx) {
if !ctx.settings.YdropComments.value || ctx.mode.is(Mode.ReadComments) then
Expand Down
42 changes: 25 additions & 17 deletions compiler/src/dotty/tools/dotc/ast/Positioned.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,34 +19,36 @@ import java.io.{ PrintWriter }
/** A base class for things that have positions (currently: modifiers and trees)
*/
abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable {
import Positioned.{ids, nextId, debugId}

private var myUniqueId: Int = _
private var mySpan: Span = _

/** A unique identifier. Among other things, used for determining the source file
* component of the position.
/** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id
* is set, -1 otherwise.
*/
def uniqueId: Int = myUniqueId
def uniqueId: Int =
if ids != null && ids.containsKey(this) then ids.get(this) else -1

def uniqueId_=(id: Int): Unit = {
def printTrace() = {
println(s"Debug tree (id=${Positioned.debugId}) creation \n$this\n")
Reporter.displayPrompt(Console.in, new PrintWriter(Console.err, true))
}
if (Positioned.debugId == id) printTrace()
myUniqueId = id
}
private def allocateId() =
if ids != null then
val ownId = nextId
nextId += 1
ids.put(this, ownId)
if ownId == debugId then
println(s"Debug tree (id=$debugId) creation \n$this\n")
Reporter.displayPrompt(Console.in, new PrintWriter(Console.err, true))

allocateId()

/** The span part of the item's position */
def span: Span = mySpan

def span_=(span: Span): Unit =
mySpan = span

uniqueId = src.nextId
span = envelope(src)

def source: SourceFile = SourceFile.fromId(uniqueId)
val source: SourceFile = src
def sourcePos(using Context): SourcePosition = source.atSpan(span)

/** This positioned item, widened to `SrcPos`. Used to make clear we only need the
Expand Down Expand Up @@ -125,7 +127,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
/** Clone this node but assign it a fresh id which marks it as a node in `file`. */
def cloneIn(src: SourceFile): this.type = {
val newpd: this.type = clone.asInstanceOf[this.type]
newpd.uniqueId = src.nextId
newpd.allocateId()
// assert(newpd.uniqueId != 2208, s"source = $this, ${this.uniqueId}, ${this.span}")
newpd
}
Expand Down Expand Up @@ -237,8 +239,14 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
}

object Positioned {
@sharable private[Positioned] var debugId = Int.MinValue
@sharable private var debugId = Int.MinValue
@sharable private var ids: java.util.WeakHashMap[Positioned, Int] = null
@sharable private var nextId: Int = 0

def updateDebugPos(using Context): Unit =
def init(using Context): Unit =
debugId = ctx.settings.YdebugTreeWithId.value
if ids == null && ctx.settings.YshowTreeIds.value
|| debugId != ctx.settings.YdebugTreeWithId.default
then
ids = java.util.WeakHashMap()
}
2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/ast/Trees.scala
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ object Trees {
}
}

override def hashCode(): Int = uniqueId // for debugging; was: System.identityHashCode(this)
override def hashCode(): Int = System.identityHashCode(this)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
}

Expand Down
27 changes: 13 additions & 14 deletions compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import dotty.tools.tasty.TastyBuffer
import TastyBuffer.{Addr, NoAddr, AddrWidth}

import util.Util.bestFit
import util.SparseIntArray
import config.Printers.pickling
import ast.untpd.Tree

Expand All @@ -21,23 +20,20 @@ class TreeBuffer extends TastyBuffer(50000) {
private var delta: Array[Int] = _
private var numOffsets = 0

/** A map from tree unique ids to the address index at which a tree is pickled.
* Note that trees are looked up by reference equality,
* so one can reliably use this function only directly after `pickler`.
*/
private val addrOfTree = SparseIntArray()
/** A map from trees to the address at which a tree is pickled. */
private val treeAddrs = util.IntMap[Tree](initialCapacity = 8192)

def registerTreeAddr(tree: Tree): Addr =
val id = tree.uniqueId
if addrOfTree.contains(id) then Addr(addrOfTree(id))
else
addrOfTree(tree.uniqueId) = currentAddr.index
val idx = treeAddrs(tree)
if idx < 0 then
treeAddrs(tree) = currentAddr.index
currentAddr
else
Addr(idx)

def addrOfTree(tree: Tree): Addr =
val idx = tree.uniqueId
if addrOfTree.contains(idx) then Addr(addrOfTree(idx))
else NoAddr
val idx = treeAddrs(tree)
if idx < 0 then NoAddr else Addr(idx)

private def offset(i: Int): Addr = Addr(offsets(i))

Expand Down Expand Up @@ -163,7 +159,10 @@ class TreeBuffer extends TastyBuffer(50000) {
}

def adjustTreeAddrs(): Unit =
addrOfTree.transform((id, addr) => adjusted(Addr(addr)).index)
var i = 0
while i < treeAddrs.size do
treeAddrs.setValue(i, adjusted(Addr(treeAddrs.value(i))).index)
i += 1

/** Final assembly, involving the following steps:
* - compute deltas
Expand Down
28 changes: 22 additions & 6 deletions compiler/src/dotty/tools/dotc/typer/Inliner.scala
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,6 @@ object Inliner {
* and body that replace it.
*/
def inlineCall(tree: Tree)(using Context): Tree = {
val startId = ctx.source.nextId

if tree.symbol.denot != SymDenotations.NoDenotation
&& tree.symbol.owner.companionModule == defn.CompiletimeTestingPackageObject
then
Expand Down Expand Up @@ -136,10 +134,6 @@ object Inliner {
|You can use ${setting.name} to change the limit.""",
(tree :: enclosingInlineds).last.srcPos
)

val endId = ctx.source.nextId
addInlinedTrees(endId - startId)

tree2
}

Expand Down Expand Up @@ -735,6 +729,26 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
case _ =>
}

/** The number of nodes in this tree, excluding code in nested inline
* calls and annotations of definitions.
*/
/** The number of nodes in this tree, excluding code in nested inline
 *  calls and annotations of definitions.
 *
 *  Each `Positioned` node counts as 1; nested `Inlined` subtrees are
 *  skipped entirely (their size is accounted for by their own inlining
 *  step), and non-tree product elements (types, names, constants)
 *  contribute 0 via the default case.
 */
def treeSize(x: Any): Int =
  var siz = 0
  x match
    case x: Trees.Inlined[_] =>
      // Skip: code inside a nested inline call is not counted here.
    case x: Positioned =>
      siz += 1 // count this node itself, then recurse into its children
      var i = 0
      while i < x.productArity do
        siz += treeSize(x.productElement(i))
        i += 1
    case x: List[_] =>
      var xs = x
      while xs.nonEmpty do
        siz += treeSize(xs.head)
        xs = xs.tail
    case _ =>
  siz

trace(i"inlining $call", inlining, show = true) {

// The normalized bindings collected in `bindingsBuf`
Expand All @@ -758,6 +772,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {

if (inlinedMethod == defn.Compiletime_error) issueError()

addInlinedTrees(treeSize(finalExpansion))

// Take care that only argument bindings go into `bindings`, since positions are
// different for bindings from arguments and bindings from body.
tpd.Inlined(call, finalBindings, finalExpansion)
Expand Down
57 changes: 57 additions & 0 deletions compiler/src/dotty/tools/dotc/util/IntMap.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
package dotty.tools.dotc.util

/** A dense map from some `Key` type to `Int`. Dense means: All keys and values
 *  are stored in arrays from 0 up to the size of the map. Keys and values
 *  can be obtained by index using `key(index)` and `value(index)`. Values
 *  can also be stored using `setValue(index, value)`.
 *
 *  Keys are managed by the [[PerfectHashing]] superclass, which assigns each
 *  distinct key a dense index; this class adds a parallel `values` array
 *  holding the `Int` associated with each key index.
 *  @param initialCapacity Indicates the initial number of slots in the hash table.
 *                         The actual number of slots is always a power of 2, so the
 *                         initial size of the table will be the smallest power of two
 *                         that is equal or greater than the given `initialCapacity`.
 *                         Minimum value is 4.
 *  @param capacityMultiple The minimum multiple of capacity relative to used elements.
 *                         The hash table will be re-sized once the number of elements
 *                         multiplied by capacityMultiple exceeds the current size of the hash table.
 *                         However, a table of size up to DenseLimit will be re-sized only
 *                         once the number of elements reaches the table's size.
 */
final class IntMap[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2)
extends PerfectHashing[Key](initialCapacity, capacityMultiple):
  // Parallel to the superclass's key storage: values(i) is the value for key(i).
  // (Re)created in `allocate`, so it always has the same capacity as the keys array.
  private var values: Array[Int] = _

  // Result reported by `apply` for a missing key.
  // NOTE(review): callers must not store -1 as a real value, or misses become
  // indistinguishable from hits — confirm against call sites.
  def default: Int = -1

  protected override def allocate(capacity: Int) =
    super.allocate(capacity)
    values = new Array[Int](capacity)

  /** The value associated with key `k`, or else `default`. */
  def apply(k: Key): Int =
    val idx = index(k)
    if idx < 0 then default else values(idx)

  /** Associate key `k` with value `v` */
  def update(k: Key, v: Int): Unit =
    val idx = add(k) // don't merge the two statements, `add` might change `values`.
    values(idx) = v

  // Preserve existing values across a resize. Dense indices are stable when
  // the backing arrays grow, so a straight prefix copy is sufficient.
  protected override def growTable() =
    val oldValues = values
    super.growTable()
    Array.copy(oldValues, 0, values, 0, oldValues.length)

  /** Iterator over stored values, in dense-index order. */
  def valuesIterator = values.iterator.take(size)

  /** Iterator over (key, value) pairs, in dense-index order. */
  def iterator: Iterator[(Key, Int)] = keysIterator.zip(valuesIterator)

  /** The value stored at index `i` */
  def value(i: Int) = values(i)

  /** Change the value stored at index `i` to `v` */
  def setValue(i: Int, v: Int) = values(i) = v

  override def toString =
    iterator.map((k, v) => s"$k -> $v").mkString("IntMap(", ", ", ")")
end IntMap
136 changes: 136 additions & 0 deletions compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,136 @@
package dotty.tools.dotc.util

object PerfectHashing:

  /** The number of elements up to which dense packing is used:
   *  below this limit keys are found by linear search, with no hash table.
   *  If the number of elements reaches `DenseLimit`, a hash table is used instead.
   */
  inline val DenseLimit = 16

/** A map that maps keys to unique integers in a dense interval starting at 0.
 *  @param initialCapacity Indicates the initial number of slots in the hash table.
 *                         The actual number of slots is always a power of 2, so the
 *                         initial size of the table will be the smallest power of two
 *                         that is equal or greater than the given `initialCapacity`.
 *                         Minimum value is 4.
 *  @param capacityMultiple The minimum multiple of capacity relative to used elements.
 *                         The hash table will be re-sized once the number of elements
 *                         multiplied by capacityMultiple exceeds the current size of the hash table.
 *                         However, a table of size up to DenseLimit will be re-sized only
 *                         once the number of elements reaches the table's size.
 */
class PerfectHashing[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2):
  import PerfectHashing.DenseLimit

  private var used: Int = _           // number of keys stored so far (= next dense index)
  private var table: Array[Int] = _   // hash slot -> dense index + 1; 0 means empty. Unused while dense.
  private var keys: Array[AnyRef] = _ // dense key storage, valid at indices 0 until `used`

  clear()

  /** Allocate backing arrays for `capacity` keys. The hash table is only
   *  allocated once we are past the dense limit. Subclasses that keep a
   *  parallel value array override this to allocate it as well.
   */
  protected def allocate(capacity: Int) =
    keys = new Array[AnyRef](capacity)
    if !isDense then
      table = new Array[Int](capacity * roundToPower(capacityMultiple))

  // Smallest power of two that is >= n (n itself if already a power of two).
  // Power-of-two table sizes let `tableIndex` use masking instead of modulo.
  private def roundToPower(n: Int) =
    if Integer.bitCount(n) == 1 then n
    else 1 << (32 - Integer.numberOfLeadingZeros(n))

  /** Remove keys from this map and set back to initial configuration */
  def clear(): Unit =
    used = 0
    allocate(roundToPower(initialCapacity max 4))

  /** The number of keys */
  final def size: Int = used

  /** The number of keys that can be stored without growing the tables */
  final def capacity: Int = keys.length

  // In dense mode lookups are linear scans over `keys`; no hashing is done.
  private final def isDense = capacity <= DenseLimit

  /** Hashcode, by default a post-processed version of `k.hashCode`,
   *  can be overridden
   */
  protected def hash(k: Key): Int =
    val h = k.hashCode
    // Part of the MurmurHash3 32 bit finalizer
    val i = (h ^ (h >>> 16)) * 0x85EBCA6B
    val j = (i ^ (i >>> 13)) & 0x7FFFFFFF
    if (j==0) 0x41081989 else j  // keep the result non-negative and never 0

  /** Equality test, by default `equals`, can be overridden */
  protected def isEqual(x: Key, y: Key): Boolean = x.equals(y)

  // Does the key stored at dense index `entry` equal `k`?
  private def matches(entry: Int, k: Key) = isEqual(key(entry), k)

  // table.length is a power of two, so masking implements "mod table.length".
  private def tableIndex(x: Int): Int = x & (table.length - 1)
  private def firstIndex(k: Key) = tableIndex(hash(k))
  private def nextIndex(idx: Int) = tableIndex(idx + 1) // linear probing, wraps around

  /** The key at index `idx` */
  def key(idx: Int) = keys(idx).asInstanceOf[Key]
  private def setKey(e: Int, k: Key) = keys(e) = k.asInstanceOf[AnyRef]

  // Hash slots store (dense index + 1) so that 0 — the Int array default —
  // means "empty slot" without a separate initialization pass.
  private def entry(idx: Int): Int = table(idx) - 1
  private def setEntry(idx: Int, entry: Int) = table(idx) = entry + 1

  /** An index `idx` such that `key(idx) == k`, or -1 if no such index exists */
  def index(k: Key): Int =
    if isDense then
      var e = 0
      while e < used do
        if matches(e, k) then return e
        e += 1
      -1
    else
      // Probe until we hit the key or an empty slot (entry == -1, also the
      // "not found" result, so `e` can be returned directly).
      var idx = firstIndex(k)
      var e = entry(idx)
      while e >= 0 && !matches(e, k) do
        idx = nextIndex(idx)
        e = entry(idx)
      e

  /** An index `idx` such that key(idx) == k.
   *  If no such index exists, create an entry with an index one
   *  larger than the previous one.
   */
  def add(k: Key): Int =
    if isDense then
      var e = 0
      while e < used do
        if matches(e, k) then return e
        e += 1
    else
      var idx = firstIndex(k)
      var e = entry(idx)
      while e >= 0 do
        if matches(e, k) then return e
        idx = nextIndex(idx)
        e = entry(idx)
      // `idx` is the first empty slot on the probe path; claim it.
      setEntry(idx, used)
    end if
    // Not found in either mode: append the key at the next dense index.
    setKey(used, k)
    used = used + 1
    if used == capacity then growTable()
    used - 1

  // Re-insert every dense index into a freshly allocated `table`
  // (needed after growing, and when first switching out of dense mode).
  private def rehash(): Unit =
    var e = 0
    while e < used do
      var idx = firstIndex(key(e))
      while entry(idx) >= 0 do idx = nextIndex(idx)
      setEntry(idx, e)
      e += 1

  /** Grow backing arrays */
  protected def growTable(): Unit =
    val oldKeys = keys
    allocate(capacity * 2) // may allocate a hash table if we just left dense mode
    Array.copy(oldKeys, 0, keys, 0, oldKeys.length)
    if !isDense then rehash()

  /** Iterator over keys, in dense-index (insertion) order. */
  def keysIterator: Iterator[Key] =
    keys.iterator.take(used).asInstanceOf[Iterator[Key]]
end PerfectHashing
Loading