Prefer mutable data structure to reduce memory usage in macros (#2932)
* Prefer mutable data structure for speed

* Prefer mutable data structure to reduce memory usage in macros (#2933)

* Prefer mutable data structure to reduce memory usage in macros

* Prefer mutable data structure to reduce memory usage in macros

* Prefer mutable data structure to reduce memory usage in macros

* Prefer mutable data structure to reduce memory usage in macros

* Prefer ListBuffer
guizmaii authored Oct 21, 2023
1 parent 0ea8815 commit 348c8e5
Showing 3 changed files with 84 additions and 76 deletions.
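
The core of the change, repeated across all three files, is to stop accumulating results by prepending to an immutable Seq and reversing at the end, and to append to a scala.collection.mutable.ListBuffer instead. A minimal, self-contained sketch of the two patterns (simplified names, not the actual token2string signature):

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer

object AccumulationSketch {

  // Before: prepend to an immutable Seq, then reverse once at the end.
  // Every prepend allocates a new cell, and the final reverse walks the whole result again.
  def renderImmutable(parts: List[String]): String = {
    @tailrec
    def loop(rest: List[String], acc: Seq[String]): String = rest match {
      case Nil          => acc.reverse.mkString("")
      case head :: tail => loop(tail, head +: acc)
    }
    loop(parts, Seq.empty)
  }

  // After: append to a single ListBuffer; += returns the buffer itself, so the
  // accumulator threads through the tail-recursive call and no reverse is needed.
  def renderMutable(parts: List[String]): String = {
    @tailrec
    def loop(rest: List[String], acc: ListBuffer[String]): String = rest match {
      case Nil          => acc.mkString("")
      case head :: tail => loop(tail, acc += head)
    }
    loop(parts, ListBuffer.empty)
  }
}

Both functions return the same string (e.g. "SELECT * FROM t" for List("SELECT ", "*", " FROM t")); only the intermediate allocations differ.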
119 changes: 63 additions & 56 deletions src/main/scala/io/getquill/idiom/ReifyStatement.scala
@@ -1,10 +1,11 @@
package io.getquill.idiom

import io.getquill.ast._
import io.getquill.util.Interleave
import io.getquill.idiom.StatementInterpolator._
import io.getquill.util.Interleave

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer

object ReifyStatement {

@@ -15,60 +16,62 @@ object ReifyStatement {
forProbing: Boolean
): (String, List[External]) = {
val expanded =
forProbing match {
case true => statement
case false => expandLiftings(statement, emptySetContainsToken)
}
if (forProbing) statement
else expandLiftings(statement, emptySetContainsToken)

token2string(expanded, liftingPlaceholder)
}

private def token2string(token: Token, liftingPlaceholder: Int => String): (String, List[External]) = {
@tailrec
def apply(
workList: List[Token],
sqlResult: Seq[String],
liftingResult: Seq[External],
sqlResult: ListBuffer[String],
liftingResult: ListBuffer[External],
liftingSize: Int
): (String, List[External]) = workList match {
case Nil => sqlResult.reverse.mkString("") -> liftingResult.reverse.toList
case head :: tail =>
head match {
case StringToken(s2) => apply(tail, s2 +: sqlResult, liftingResult, liftingSize)
case SetContainsToken(a, op, b) => apply(stmt"$a $op ($b)" +: tail, sqlResult, liftingResult, liftingSize)
case ScalarLiftToken(lift) =>
apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, lift +: liftingResult, liftingSize + 1)
case ScalarTagToken(tag) =>
apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, tag +: liftingResult, liftingSize + 1)
case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize)
case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize)
case _: QuotationTagToken =>
throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.")
}
}
): (String, List[External]) =
workList match {
case Nil => sqlResult.mkString("") -> liftingResult.toList
case head :: tail =>
head match {
case StringToken(s2) => apply(tail, sqlResult += s2, liftingResult, liftingSize)
case SetContainsToken(a, op, b) => apply(stmt"$a $op ($b)" +: tail, sqlResult, liftingResult, liftingSize)
case ScalarLiftToken(lift) =>
apply(tail, sqlResult += liftingPlaceholder(liftingSize), liftingResult += lift, liftingSize + 1)
case ScalarTagToken(tag) =>
apply(tail, sqlResult += liftingPlaceholder(liftingSize), liftingResult += tag, liftingSize + 1)
case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize)
case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize)
case _: QuotationTagToken =>
throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.")
}
}

apply(List(token), Seq(), Seq(), 0)
apply(List(token), ListBuffer.empty, ListBuffer.empty, 0)
}

private def expandLiftings(statement: Statement, emptySetContainsToken: Token => Token) =
private def expandLiftings(statement: Statement, emptySetContainsToken: Token => Token): Statement =
Statement {
statement.tokens.foldLeft(List.empty[Token]) {
case (tokens, SetContainsToken(a, op, ScalarLiftToken(lift: ScalarQueryLift))) =>
lift.value.asInstanceOf[Iterable[Any]].toList match {
case Nil => tokens :+ emptySetContainsToken(a)
case values =>
val liftings = values.map(v =>
ScalarLiftToken(ScalarValueLift(lift.name, External.Source.Parser, v, lift.encoder, lift.quat))
)
val separators = List.fill(liftings.size - 1)(StringToken(", "))
(tokens :+ stmt"$a $op (") ++ Interleave(liftings, separators) :+ StringToken(")")
}
case (tokens, token) =>
tokens :+ token
}
statement.tokens
.foldLeft(List.empty[Token]) {
case (tokens, SetContainsToken(a, op, ScalarLiftToken(lift: ScalarQueryLift))) =>
lift.value.asInstanceOf[Iterable[Any]].toList match {
case Nil => tokens :+ emptySetContainsToken(a)
case values =>
val liftings = values.map(v =>
ScalarLiftToken(ScalarValueLift(lift.name, External.Source.Parser, v, lift.encoder, lift.quat))
)
val separators = List.fill(liftings.size - 1)(StringToken(", "))
(tokens :+ stmt"$a $op (") ++ Interleave(liftings, separators) :+ StringToken(")")
}
case (tokens, token) =>
tokens :+ token
}
}
}

object ReifyStatementWithInjectables {
import Tokens._

def apply[T](
liftingPlaceholder: Int => String,
@@ -79,10 +82,9 @@ object ReifyStatementWithInjectables {
injectables: List[(String, T => ScalarLift)]
): (String, List[External]) = {
val expanded =
forProbing match {
case true => statement
case false => expandLiftings(statement, emptySetContainsToken, subBatch, injectables.toMap)
}
if (forProbing) statement
else expandLiftings(statement, emptySetContainsToken, subBatch, injectables.toMap)

val (query, externals) = token2string(expanded, liftingPlaceholder)
(query, externals)
}
@@ -91,37 +93,37 @@
@tailrec
def apply(
workList: List[Token],
sqlResult: Seq[String],
liftingResult: Seq[External],
sqlResult: ListBuffer[String],
liftingResult: ListBuffer[External],
liftingSize: Int
): (String, List[External]) = workList match {
case Nil => sqlResult.reverse.mkString("") -> liftingResult.reverse.toList
case Nil => sqlResult.mkString("") -> liftingResult.toList
case head :: tail =>
head match {
case StringToken(s2) => apply(tail, s2 +: sqlResult, liftingResult, liftingSize)
case StringToken(s2) => apply(tail, sqlResult += s2, liftingResult, liftingSize)
case SetContainsToken(a, op, b) => apply(stmt"$a $op ($b)" +: tail, sqlResult, liftingResult, liftingSize)
case ScalarLiftToken(lift) =>
apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, lift +: liftingResult, liftingSize + 1)
apply(tail, sqlResult += liftingPlaceholder(liftingSize), liftingResult += lift, liftingSize + 1)
case ScalarTagToken(tag) =>
apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, tag +: liftingResult, liftingSize + 1)
apply(tail, sqlResult += liftingPlaceholder(liftingSize), liftingResult += tag, liftingSize + 1)
case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize)
case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize)
case _: QuotationTagToken =>
throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.")
}
}

apply(List(token), Seq(), Seq(), 0)
apply(List(token), ListBuffer.empty, ListBuffer.empty, 0)
}

private def expandLiftings[T](
statement: Statement,
emptySetContainsToken: Token => Token,
subBatch: List[T],
injectables: collection.Map[String, T => ScalarLift]
) = {
): Statement = {

def resolveInjectableValue(v: ScalarTagToken, value: T) = {
def resolveInjectableValue(v: ScalarTagToken, value: T): ScalarLiftToken = {
val injectable =
// Look up the right uuid:String to get the right <some-field> for ((p:Person) => ScalarLift(p.<some-field>))
injectables.get(v.tag.uid) match {
@@ -170,21 +172,26 @@
}
case (tokens, valuesClause: ValuesClauseToken) =>
val pluggedClauses = subBatch.map(value => plugScalarTags(valuesClause, value))
val separators = List.fill(pluggedClauses.size - 1)(StringToken(", "))
(tokens ++ Interleave(pluggedClauses.toList, separators))
val separators = List.fill(pluggedClauses.size - 1)(`, `)
tokens ++ Interleave(pluggedClauses, separators)
case (tokens, SetContainsToken(a, op, ScalarLiftToken(lift: ScalarQueryLift))) =>
lift.value.asInstanceOf[Iterable[Any]].toList match {
case Nil => tokens :+ emptySetContainsToken(a)
case values =>
val liftings = values.map(v =>
ScalarLiftToken(ScalarValueLift(lift.name, External.Source.Parser, v, lift.encoder, lift.quat))
)
val separators = List.fill(liftings.size - 1)(StringToken(", "))
(tokens :+ stmt"$a $op (") ++ Interleave(liftings, separators) :+ StringToken(")")
val separators = List.fill(liftings.size - 1)(`, `)
(tokens :+ stmt"$a $op (") ++ Interleave(liftings, separators) :+ `)`
}
case (tokens, token) =>
tokens :+ token
}
}
}
}

private[idiom] object Tokens {
val `, ` : StringToken = StringToken(", ")
val `)` : StringToken = StringToken(")")
}
30 changes: 15 additions & 15 deletions src/main/scala/io/getquill/idiom/Statement.scala
@@ -2,33 +2,33 @@ package io.getquill.idiom

import io.getquill.ast._

sealed trait Token
sealed trait Token extends Product with Serializable
sealed trait TagToken extends Token

case class StringToken(string: String) extends Token {
override def toString = string
final case class StringToken(string: String) extends Token {
override def toString: String = string
}

case class ScalarTagToken(tag: ScalarTag) extends TagToken {
override def toString = s"lift(${tag.uid})"
final case class ScalarTagToken(tag: ScalarTag) extends TagToken {
override def toString: String = s"lift(${tag.uid})"
}

case class QuotationTagToken(tag: QuotationTag) extends TagToken {
override def toString = s"quoted(${tag.uid})"
final case class QuotationTagToken(tag: QuotationTag) extends TagToken {
override def toString: String = s"quoted(${tag.uid})"
}

case class ScalarLiftToken(lift: ScalarLift) extends Token {
override def toString = s"lift(${lift.name})"
final case class ScalarLiftToken(lift: ScalarLift) extends Token {
override def toString: String = s"lift(${lift.name})"
}

case class ValuesClauseToken(statement: Statement) extends Token {
override def toString = statement.toString
final case class ValuesClauseToken(statement: Statement) extends Token {
override def toString: String = statement.toString
}

case class Statement(tokens: List[Token]) extends Token {
override def toString = tokens.mkString
final case class Statement(tokens: List[Token]) extends Token {
override def toString: String = tokens.mkString
}

case class SetContainsToken(a: Token, op: Token, b: Token) extends Token {
override def toString = s"${a.toString} ${op.toString} (${b.toString})"
final case class SetContainsToken(a: Token, op: Token, b: Token) extends Token {
override def toString: String = s"${a.toString} ${op.toString} (${b.toString})"
}
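
Beyond marking the case classes final, mixing Product with Serializable into the sealed Token trait keeps inferred types tidy when different token kinds are combined. A small self-contained example with unrelated Shape types, used purely for illustration:

sealed trait Shape extends Product with Serializable
final case class Circle(radius: Double) extends Shape
final case class Square(side: Double) extends Shape

object ShapeInference {
  // Without "extends Product with Serializable" on Shape, Scala 2 infers the
  // noisier List[Product with Serializable with Shape]; with it, simply List[Shape].
  val shapes = List(Circle(1.0), Square(2.0))
}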
11 changes: 6 additions & 5 deletions src/main/scala/io/getquill/util/Interleave.scala
@@ -1,17 +1,18 @@
package io.getquill.util

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer

object Interleave {

def apply[T](l1: List[T], l2: List[T]): List[T] =
interleave(l1, l2, List.empty)
interleave(l1, l2, ListBuffer.empty)

@tailrec
private[this] def interleave[T](l1: List[T], l2: List[T], acc: List[T]): List[T] =
private[this] def interleave[T](l1: List[T], l2: List[T], acc: ListBuffer[T]): List[T] =
(l1, l2) match {
case (Nil, l2) => acc.reverse ++ l2
case (l1, Nil) => acc.reverse ++ l1
case (h1 :: t1, h2 :: t2) => interleave(t1, t2, h2 +: h1 +: acc)
case (Nil, l2) => (acc ++ l2).toList
case (l1, Nil) => (acc ++ l1).toList
case (h1 :: t1, h2 :: t2) => interleave(t1, t2, { acc += h1; acc += h2 })
}
}
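
Interleave's observable behaviour is unchanged; only the accumulator becomes a ListBuffer, dropping the final reverse. A quick usage sketch of the expected results, assuming the alternating semantics shown above:

import io.getquill.util.Interleave

object InterleaveExamples {
  // Elements alternate starting from the first list; any leftover tail is appended as-is.
  val a = Interleave(List("a", "c", "e"), List("b", "d")) // List("a", "b", "c", "d", "e")
  val b = Interleave(List(1, 2), List(10, 20, 30, 40))    // List(1, 10, 2, 20, 30, 40)
  val c = Interleave(List.empty[Int], List(1, 2, 3))      // List(1, 2, 3)
}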
