2 changes: 1 addition & 1 deletion pom.xml
@@ -128,7 +128,7 @@
<!-- Version used for internal directory structure -->
<hive.version.short>2.3</hive.version.short>
<!-- note that this should be compatible with Kafka brokers version 0.10 and up -->
<kafka.version>3.2.0</kafka.version>
<kafka.version>3.2.1</kafka.version>
<!-- After 10.15.1.3, the minimum required version is JDK9 -->
<derby.version>10.14.2.0</derby.version>
<parquet.version>1.12.3</parquet.version>
3 changes: 3 additions & 0 deletions python/pyspark/pandas/base.py
@@ -1179,6 +1179,9 @@ def _shift(
if not isinstance(periods, int):
raise TypeError("periods should be an int; however, got [%s]" % type(periods).__name__)

if periods == 0:
return self.copy()

col = self.spark.column
window = (
Window.partitionBy(*part_cols)
1 change: 1 addition & 0 deletions python/pyspark/pandas/tests/test_dataframe.py
@@ -4249,6 +4249,7 @@ def test_shift(self):
psdf.columns = columns
self.assert_eq(pdf.shift(3), psdf.shift(3))
self.assert_eq(pdf.shift().shift(-1), psdf.shift().shift(-1))
self.assert_eq(pdf.shift(0), psdf.shift(0))

def test_diff(self):
pdf = pd.DataFrame(
2 changes: 2 additions & 0 deletions python/pyspark/pandas/tests/test_series.py
@@ -1549,6 +1549,8 @@ def test_shift(self):
with self.assertRaisesRegex(TypeError, "periods should be an int; however"):
psser.shift(periods=1.5)

self.assert_eq(psser.shift(periods=0), pser.shift(periods=0))

def test_diff(self):
pser = pd.Series([10, 20, 15, 30, 45], name="x")
psser = ps.Series(pser)
PredicateUtils.scala
@@ -19,14 +19,25 @@ package org.apache.spark.sql.internal.connector

import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.connector.expressions.{LiteralValue, NamedReference}
import org.apache.spark.sql.connector.expressions.filter.Predicate
import org.apache.spark.sql.sources.{Filter, In}
import org.apache.spark.sql.connector.expressions.filter.{And => V2And, Not => V2Not, Or => V2Or, Predicate}
import org.apache.spark.sql.sources.{AlwaysFalse, AlwaysTrue, And, EqualNullSafe, EqualTo, Filter, GreaterThan, GreaterThanOrEqual, In, IsNotNull, IsNull, LessThan, LessThanOrEqual, Not, Or, StringContains, StringEndsWith, StringStartsWith}
import org.apache.spark.sql.types.StringType

private[sql] object PredicateUtils {

def toV1(predicate: Predicate): Option[Filter] = {

def isValidBinaryPredicate(): Boolean = {
if (predicate.children().length == 2 &&
predicate.children()(0).isInstanceOf[NamedReference] &&
predicate.children()(1).isInstanceOf[LiteralValue[_]]) {
true
} else {
false
}
}

predicate.name() match {
// TODO: add conversion for other V2 Predicate
case "IN" if predicate.children()(0).isInstanceOf[NamedReference] =>
val attribute = predicate.children()(0).toString
val values = predicate.children().drop(1)
@@ -43,6 +54,81 @@
Some(In(attribute, Array.empty[Any]))
}

case "=" | "<=>" | ">" | "<" | ">=" | "<=" if isValidBinaryPredicate =>
val attribute = predicate.children()(0).toString
val value = predicate.children()(1).asInstanceOf[LiteralValue[_]]
val v1Value = CatalystTypeConverters.convertToScala(value.value, value.dataType)
val v1Filter = predicate.name() match {
case "=" => EqualTo(attribute, v1Value)
case "<=>" => EqualNullSafe(attribute, v1Value)
case ">" => GreaterThan(attribute, v1Value)
case ">=" => GreaterThanOrEqual(attribute, v1Value)
case "<" => LessThan(attribute, v1Value)
case "<=" => LessThanOrEqual(attribute, v1Value)
}
Some(v1Filter)

case "IS_NULL" | "IS_NOT_NULL" if predicate.children().length == 1 &&
predicate.children()(0).isInstanceOf[NamedReference] =>
val attribute = predicate.children()(0).toString
val v1Filter = predicate.name() match {
case "IS_NULL" => IsNull(attribute)
case "IS_NOT_NULL" => IsNotNull(attribute)
}
Some(v1Filter)

case "STARTS_WITH" | "ENDS_WITH" | "CONTAINS" if isValidBinaryPredicate =>
val attribute = predicate.children()(0).toString
val value = predicate.children()(1).asInstanceOf[LiteralValue[_]]
if (!value.dataType.sameType(StringType)) return None
val v1Value = value.value.toString
val v1Filter = predicate.name() match {
case "STARTS_WITH" =>
StringStartsWith(attribute, v1Value)
case "ENDS_WITH" =>
StringEndsWith(attribute, v1Value)
case "CONTAINS" =>
StringContains(attribute, v1Value)
}
Some(v1Filter)

case "ALWAYS_TRUE" | "ALWAYS_FALSE" if predicate.children().isEmpty =>
val v1Filter = predicate.name() match {
case "ALWAYS_TRUE" => AlwaysTrue()
case "ALWAYS_FALSE" => AlwaysFalse()
}
Some(v1Filter)

case "AND" =>
val and = predicate.asInstanceOf[V2And]
val left = toV1(and.left())
val right = toV1(and.right())
if (left.nonEmpty && right.nonEmpty) {
Some(And(left.get, right.get))
} else {
None
}

case "OR" =>
val or = predicate.asInstanceOf[V2Or]
val left = toV1(or.left())
val right = toV1(or.right())
if (left.nonEmpty && right.nonEmpty) {
Some(Or(left.get, right.get))
} else if (left.nonEmpty) {
left
} else {
right
}

case "NOT" =>
val child = toV1(predicate.asInstanceOf[V2Not].child())
if (child.nonEmpty) {
Some(Not(child.get))
} else {
None
}

case _ => None
}
}
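For context, a minimal sketch of how the new PredicateUtils.toV1 conversion might be called from other connector code. The package and the ToV1Example object are illustrative only, not part of this change; since PredicateUtils is private[sql], such a caller would have to live under org.apache.spark.sql:

package org.apache.spark.sql.internal.connector

import org.apache.spark.sql.connector.expressions.{Expression, FieldReference, LiteralValue}
import org.apache.spark.sql.connector.expressions.filter.{And => V2And, Predicate}
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.IntegerType

// Hypothetical example object, not part of this patch.
object ToV1Example {
  def main(args: Array[String]): Unit = {
    // a > 1 AND b IS NOT NULL, expressed as V2 predicates.
    val gt = new Predicate(">",
      Array[Expression](FieldReference("a"), LiteralValue(1, IntegerType)))
    val notNull = new Predicate("IS_NOT_NULL", Array[Expression](FieldReference("b")))

    // Supported shapes convert to Some(v1Filter),
    // here And(GreaterThan(a, 1), IsNotNull(b)).
    val converted: Option[Filter] = PredicateUtils.toV1(new V2And(gt, notNull))
    println(converted)

    // A binary predicate whose left child is not a column reference fails the
    // isValidBinaryPredicate guard, so toV1 returns None.
    val unsupported = new Predicate(">",
      Array[Expression](LiteralValue(1, IntegerType), FieldReference("a")))
    println(PredicateUtils.toV1(unsupported))
  }
}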
V2PredicateSuite.scala
@@ -21,6 +21,7 @@ import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.connector.expressions.{Expression, FieldReference, Literal, LiteralValue}
import org.apache.spark.sql.connector.expressions.filter._
import org.apache.spark.sql.execution.datasources.v2.V2PredicateSuite.ref
import org.apache.spark.sql.internal.connector.PredicateUtils
import org.apache.spark.sql.sources.{AlwaysFalse => V1AlwaysFalse, AlwaysTrue => V1AlwaysTrue, And => V1And, EqualNullSafe, EqualTo, GreaterThan, GreaterThanOrEqual, In, IsNotNull, IsNull, LessThan, LessThanOrEqual, Not => V1Not, Or => V1Or, StringContains, StringEndsWith, StringStartsWith}
import org.apache.spark.sql.types.{IntegerType, StringType}
import org.apache.spark.unsafe.types.UTF8String
@@ -34,20 +35,29 @@ class V2PredicateSuite extends SparkFunSuite {
assert(predicate1.describe.equals("a.B = 1"))
val v1Filter1 = EqualTo(ref("a", "B").describe(), 1)
assert(v1Filter1.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter1)
assert(PredicateUtils.toV1(v1Filter1.toV2).get == v1Filter1)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)

val predicate2 =
new Predicate("=", Array[Expression](ref("a", "b.c"), LiteralValue(1, IntegerType)))
assert(predicate2.references.map(_.describe()).toSeq == Seq("a.`b.c`"))
assert(predicate2.describe.equals("a.`b.c` = 1"))
val v1Filter2 = EqualTo(ref("a", "b.c").describe(), 1)
assert(v1Filter2.toV2 == predicate2)
assert(PredicateUtils.toV1(predicate2).get == v1Filter2)
assert(PredicateUtils.toV1(v1Filter2.toV2).get == v1Filter2)
assert(PredicateUtils.toV1(predicate2).get.toV2 == predicate2)

val predicate3 =
new Predicate("=", Array[Expression](ref("`a`.b", "c"), LiteralValue(1, IntegerType)))
assert(predicate3.references.map(_.describe()).toSeq == Seq("```a``.b`.c"))
assert(predicate3.describe.equals("```a``.b`.c = 1"))
val v1Filter3 = EqualTo(ref("`a`.b", "c").describe(), 1)
assert(v1Filter3.toV2 == predicate3)
assert(PredicateUtils.toV1(predicate3).get == v1Filter3)
assert(PredicateUtils.toV1(v1Filter3.toV2).get == v1Filter3)
assert(PredicateUtils.toV1(predicate3).get.toV2 == predicate3)
}

test("AlwaysTrue") {
@@ -59,6 +69,9 @@

val v1Filter = V1AlwaysTrue
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("AlwaysFalse") {
@@ -70,6 +83,9 @@

val v1Filter = V1AlwaysFalse
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("EqualTo") {
@@ -81,6 +97,9 @@

val v1Filter = EqualTo("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("EqualNullSafe") {
@@ -92,6 +111,9 @@

val v1Filter = EqualNullSafe("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("LessThan") {
@@ -103,6 +125,9 @@

val v1Filter = LessThan("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("LessThanOrEqual") {
@@ -114,6 +139,9 @@

val v1Filter = LessThanOrEqual("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("GreatThan") {
@@ -125,6 +153,9 @@

val v1Filter = GreaterThan("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("GreatThanOrEqual") {
@@ -136,6 +167,9 @@

val v1Filter = GreaterThanOrEqual("a", 1)
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("In") {
@@ -161,9 +195,15 @@

val v1Filter1 = In("a", Array(1, 2, 3, 4))
assert(v1Filter1.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter1)
assert(PredicateUtils.toV1(v1Filter1.toV2).get == v1Filter1)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)

val v1Filter2 = In("a", values.map(_.value()))
assert(v1Filter2.toV2 == predicate3)
assert(PredicateUtils.toV1(predicate3).get == v1Filter2)
assert(PredicateUtils.toV1(v1Filter2.toV2).get == v1Filter2)
assert(PredicateUtils.toV1(predicate3).get.toV2 == predicate3)
}

test("IsNull") {
@@ -175,6 +215,9 @@

val v1Filter = IsNull("a")
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("IsNotNull") {
@@ -186,6 +229,9 @@

val v1Filter = IsNotNull("a")
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("Not") {
@@ -199,6 +245,14 @@

val v1Filter = V1Not(LessThan("a", 1))
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)

val predicate3 = new Not(
new Predicate("=", Array[Expression](LiteralValue(1, IntegerType),
LiteralValue(1, IntegerType))))
assert(PredicateUtils.toV1(predicate3) == None)
}

test("And") {
Expand All @@ -214,6 +268,15 @@ class V2PredicateSuite extends SparkFunSuite {

val v1Filter = V1And(EqualTo("a", 1), EqualTo("b", 1))
assert(v1Filter.toV2 == predicate1)
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)

val predicate3 = new And(
new Predicate("=", Array[Expression](ref("a"), LiteralValue(1, IntegerType))),
new Predicate("=", Array[Expression](LiteralValue(1, IntegerType),
LiteralValue(1, IntegerType))))
assert(PredicateUtils.toV1(predicate3) == None)
}

test("Or") {
Expand All @@ -229,6 +292,19 @@ class V2PredicateSuite extends SparkFunSuite {

val v1Filter = V1Or(EqualTo("a", 1), EqualTo("b", 1))
assert(v1Filter.toV2.equals(predicate1))
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)

val left = new Predicate("=", Array[Expression](ref("a"), LiteralValue(1, IntegerType)))
val predicate3 = new Or(left,
new Predicate("=", Array[Expression](LiteralValue(1, IntegerType))))
assert(PredicateUtils.toV1(predicate3) == PredicateUtils.toV1(left))

val predicate4 = new Or(
new Predicate("=", Array[Expression](LiteralValue(1, IntegerType))),
new Predicate("=", Array[Expression](LiteralValue(1, IntegerType))))
assert(PredicateUtils.toV1(predicate4) == None)
}

test("StringStartsWith") {
Expand All @@ -243,6 +319,9 @@ class V2PredicateSuite extends SparkFunSuite {

val v1Filter = StringStartsWith("a", "str")
assert(v1Filter.toV2.equals(predicate1))
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("StringEndsWith") {
Expand All @@ -257,6 +336,9 @@ class V2PredicateSuite extends SparkFunSuite {

val v1Filter = StringEndsWith("a", "str")
assert(v1Filter.toV2.equals(predicate1))
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}

test("StringContains") {
Expand All @@ -271,6 +353,9 @@ class V2PredicateSuite extends SparkFunSuite {

val v1Filter = StringContains("a", "str")
assert(v1Filter.toV2.equals(predicate1))
assert(PredicateUtils.toV1(predicate1).get == v1Filter)
assert(PredicateUtils.toV1(v1Filter.toV2).get == v1Filter)
assert(PredicateUtils.toV1(predicate1).get.toV2 == predicate1)
}
}
