Commit 7a23cb4

Address comments
1 parent 8846bf5 commit 7a23cb4


sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonDataSource.scala

Lines changed: 2 additions & 3 deletions
@@ -32,7 +32,7 @@ import org.apache.spark.sql.catalyst.expressions.PythonUDF
 import org.apache.spark.sql.catalyst.types.DataTypeUtils.toAttributes
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
 import org.apache.spark.sql.connector.catalog.{SupportsRead, Table, TableCapability, TableProvider}
-import org.apache.spark.sql.connector.catalog.TableCapability.{BATCH_READ, BATCH_WRITE}
+import org.apache.spark.sql.connector.catalog.TableCapability.BATCH_READ
 import org.apache.spark.sql.connector.expressions.Transform
 import org.apache.spark.sql.connector.read.{Batch, InputPartition, PartitionReader, PartitionReaderFactory, Scan, ScanBuilder}
 import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -60,13 +60,12 @@ class PythonTableProvider(shortName: String) extends TableProvider {
       schema: StructType,
       partitioning: Array[Transform],
       properties: java.util.Map[String, String]): Table = {
-    assert(partitioning.isEmpty)
     val outputSchema = schema
     new Table with SupportsRead {
       override def name(): String = shortName
 
       override def capabilities(): java.util.Set[TableCapability] = java.util.EnumSet.of(
-        BATCH_READ, BATCH_WRITE)
+        BATCH_READ)
 
       override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
         new ScanBuilder with Batch with Scan {
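
A minimal, self-contained sketch of what this commit changes in the advertised capabilities, assuming only the Spark connector classes already imported in the diff; the object and main method below are illustrative and not part of the actual file. After this change the Python data source table claims only batch-read support (BATCH_WRITE is no longer included), and the partitioning assertion is dropped from getTable.

// Sketch only: mirrors the capabilities() override shown in the diff above.
import org.apache.spark.sql.connector.catalog.TableCapability
import org.apache.spark.sql.connector.catalog.TableCapability.BATCH_READ

object CapabilitySketch {
  // Builds the same capability set as the updated override in the diff.
  def capabilities(): java.util.Set[TableCapability] =
    java.util.EnumSet.of(BATCH_READ)

  def main(args: Array[String]): Unit = {
    // Prints "[BATCH_READ]" -- the table no longer advertises batch-write support.
    println(capabilities())
  }
}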
