Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove useless match in liftTokenizer #2899

Merged
merged 2 commits into from
Oct 2, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -6,31 +6,34 @@ import io.getquill.util.Messages._

import scala.collection.mutable.ListBuffer

//noinspection ConvertExpressionToSAM
object StatementInterpolator {

trait Tokenizer[T] {
def token(v: T): Token
}

object Tokenizer {
def apply[T](f: T => Token) = new Tokenizer[T] {
def token(v: T) = f(v)
}
def apply[T](f: T => Token): Tokenizer[T] =
new Tokenizer[T] {
def token(v: T): Token = f(v)
}

def withFallback[T](
fallback: Tokenizer[T] => Tokenizer[T]
)(pf: PartialFunction[T, Token]) =
)(pf: PartialFunction[T, Token]): Tokenizer[T] =
new Tokenizer[T] {
private val stable = fallback(this)
override def token(v: T) = pf.applyOrElse(v, stable.token)
private lazy val stable: Tokenizer[T] = fallback(this)
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was weird to me. Why would we need to compute the fallback value before knowing whether we'll ever need it one day? 🤔

override def token(v: T): Token = pf.applyOrElse(v, stable.token)
}
}

implicit class TokenImplicit[T](v: T)(implicit tokenizer: Tokenizer[T]) {
def token = tokenizer.token(v)
implicit final class TokenImplicit[T](private val v: T) extends AnyVal {
def token(implicit tokenizer: Tokenizer[T]): Token = tokenizer.token(v)
}

implicit def stringTokenizer: Tokenizer[String] =
Tokenizer[String] { case string =>
implicit val stringTokenizer: Tokenizer[String] =
Tokenizer[String] { string =>
StringToken(string)
}

Expand All @@ -43,19 +46,15 @@ object StatementInterpolator {
case lift: Lift => liftTokenizer.token(lift)
}

implicit def tagTokenizer: Tokenizer[Tag] =
implicit val tagTokenizer: Tokenizer[Tag] =
Tokenizer[Tag] {
case tag: ScalarTag => ScalarTagToken(tag)
case tag: QuotationTag => QuotationTagToken(tag)
}

implicit def liftTokenizer: Tokenizer[Lift] =
implicit val liftTokenizer: Tokenizer[Lift] =
Tokenizer[Lift] {
case tag: ScalarTag => ScalarTagToken(tag)
case tag: QuotationTag => QuotationTagToken(tag)
case lift: ScalarLift => ScalarLiftToken(lift)
// TODO Longer Explanation
case lift: Tag => fail("Cannot tokenizer a non-scalar tagging.")
Comment on lines -54 to -58
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

AFAICT, a Lift can't be a ScalarTag, nor a QuotationTag, nor a Tag

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good catch! I think there was a point at which Tag was a sub-type of Lift but I changed that a while ago.

case lift: ScalarLift => ScalarLiftToken(lift)
case lift: Lift =>
fail(
s"Can't tokenize a non-scalar lifting. ${lift.name}\n" +
Expand Down Expand Up @@ -93,30 +92,28 @@ object StatementInterpolator {
)
}

implicit def tokenTokenizer: Tokenizer[Token] = Tokenizer[Token](identity)
implicit def statementTokenizer: Tokenizer[Statement] =
implicit val tokenTokenizer: Tokenizer[Token] = Tokenizer[Token](identity)
implicit val statementTokenizer: Tokenizer[Statement] =
Tokenizer[Statement](identity)
implicit def stringTokenTokenizer: Tokenizer[StringToken] =
implicit val stringTokenTokenizer: Tokenizer[StringToken] =
Tokenizer[StringToken](identity)
implicit def liftingTokenTokenizer: Tokenizer[ScalarLiftToken] =
implicit val liftingTokenTokenizer: Tokenizer[ScalarLiftToken] =
Comment on lines +95 to +100
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't need to compute them more than once

Tokenizer[ScalarLiftToken](identity)

implicit class TokenList[T](list: List[T]) {
def mkStmt(sep: String = ", ")(implicit tokenize: Tokenizer[T]) = {
implicit final class TokenList[T](private val list: List[T]) extends AnyVal {
def mkStmt(sep: String = ", ")(implicit tokenize: Tokenizer[T]): Statement = {
val l1 = list.map(_.token)
val l2 = List.fill(l1.size - 1)(StringToken(sep))
Statement(Interleave(l1, l2))
}
}

implicit def listTokenizer[T](implicit
tokenize: Tokenizer[T]
): Tokenizer[List[T]] =
Tokenizer[List[T]] { case list =>
implicit def listTokenizer[T](implicit tokenize: Tokenizer[T]): Tokenizer[List[T]] =
Tokenizer[List[T]] { list =>
list.mkStmt()
}

implicit class Impl(sc: StringContext) {
implicit final class Impl(private val sc: StringContext) extends AnyVal {

private def flatten(tokens: List[Token]): List[Token] = {

Expand Down Expand Up @@ -147,7 +144,7 @@ object StatementInterpolator {
}

(unnestStatements _)
.andThen(mergeStringTokens _)
.andThen(mergeStringTokens)
.apply(tokens)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,19 @@ import scala.reflect.{ClassTag, classTag}

trait PostgresJsonExtensions { this: Encoders with Decoders =>

implicit def jsonEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonValue[T]] =
implicit final def jsonEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonValue[T]] =
entityEncoder[T, JsonValue[T]](_.value)("json", jsonEncoder)
implicit def jsonEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonValue[T]] =
implicit final def jsonEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonValue[T]] =
entityDecoder[T, JsonValue[T]](JsonValue(_))("json", jsonDecoder)
implicit def jsonbEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonbValue[T]] =
implicit final def jsonbEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonbValue[T]] =
entityEncoder[T, JsonbValue[T]](_.value)("jsonb", jsonEncoder)
implicit def jsonbEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonbValue[T]] =
implicit final def jsonbEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonbValue[T]] =
entityDecoder[T, JsonbValue[T]](JsonbValue(_))("jsonb", jsonDecoder)

implicit def jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json")
implicit def jsonAstDecoder: Decoder[JsonValue[Json]] = astDecoder(JsonValue(_))
implicit def jsonbAstEncoder: Encoder[JsonbValue[Json]] = astEncoder(_.value.toString(), "jsonb")
implicit def jsonbAstDecoder: Decoder[JsonbValue[Json]] = astDecoder(JsonbValue(_))
implicit final val jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json")
implicit final val jsonAstDecoder: Decoder[JsonValue[Json]] = astDecoder(JsonValue(_))
implicit final val jsonbAstEncoder: Encoder[JsonbValue[Json]] = astEncoder(_.value.toString(), "jsonb")
implicit final val jsonbAstDecoder: Decoder[JsonbValue[Json]] = astDecoder(JsonbValue(_))
Comment on lines +22 to +25
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't need to compute them more than once


protected def astEncoder[Wrapper](valueToString: Wrapper => String, jsonType: String): Encoder[Wrapper] =
encoder(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,19 @@ import scala.reflect.{ClassTag, classTag}

trait PostgresJsonExtensions { this: Encoders with Decoders =>

implicit def jsonEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonValue[T]] =
implicit final def jsonEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonValue[T]] =
entityEncoder[T, JsonValue[T]](_.value)("json", jsonEncoder)
implicit def jsonEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonValue[T]] =
implicit final def jsonEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonValue[T]] =
entityDecoder[T, JsonValue[T]](JsonValue(_))("json", jsonDecoder)
implicit def jsonbEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonbValue[T]] =
implicit final def jsonbEntityEncoder[T](implicit jsonEncoder: JsonEncoder[T]): Encoder[JsonbValue[T]] =
entityEncoder[T, JsonbValue[T]](_.value)("jsonb", jsonEncoder)
implicit def jsonbEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonbValue[T]] =
implicit final def jsonbEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonbValue[T]] =
entityDecoder[T, JsonbValue[T]](JsonbValue(_))("jsonb", jsonDecoder)

implicit def jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json")
implicit def jsonAstDecoder: Decoder[JsonValue[Json]] = astDecoder(JsonValue(_))
implicit def jsonbAstEncoder: Encoder[JsonbValue[Json]] = astEncoder(_.value.toString(), "jsonb")
implicit def jsonbAstDecoder: Decoder[JsonbValue[Json]] = astDecoder(JsonbValue(_))
implicit final val jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json")
implicit final val jsonAstDecoder: Decoder[JsonValue[Json]] = astDecoder(JsonValue(_))
implicit final val jsonbAstEncoder: Encoder[JsonbValue[Json]] = astEncoder(_.value.toString(), "jsonb")
implicit final val jsonbAstDecoder: Decoder[JsonbValue[Json]] = astDecoder(JsonbValue(_))
Comment on lines +22 to +25
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't need to compute them more than once


def astEncoder[Wrapper](valueToString: Wrapper => String, jsonType: String): Encoder[Wrapper] =
encoder(
Expand Down