@@ -32,7 +32,7 @@ import org.apache.spark.sql.catalyst.expressions.PythonUDF
 import org.apache.spark.sql.catalyst.types.DataTypeUtils.toAttributes
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
 import org.apache.spark.sql.connector.catalog.{SupportsRead, Table, TableCapability, TableProvider}
-import org.apache.spark.sql.connector.catalog.TableCapability.{BATCH_READ, BATCH_WRITE}
+import org.apache.spark.sql.connector.catalog.TableCapability.BATCH_READ
 import org.apache.spark.sql.connector.expressions.Transform
 import org.apache.spark.sql.connector.read.{Batch, InputPartition, PartitionReader, PartitionReaderFactory, Scan, ScanBuilder}
 import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -60,13 +60,12 @@ class PythonTableProvider(shortName: String) extends TableProvider {
       schema: StructType,
       partitioning: Array[Transform],
       properties: java.util.Map[String, String]): Table = {
-    assert(partitioning.isEmpty)
     val outputSchema = schema
     new Table with SupportsRead {
       override def name(): String = shortName
 
       override def capabilities(): java.util.Set[TableCapability] = java.util.EnumSet.of(
-        BATCH_READ)
+        BATCH_READ)
 
       override def newScanBuilder(options: CaseInsensitiveStringMap): ScanBuilder = {
        new ScanBuilder with Batch with Scan {
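
For context, this change narrows the Python-backed table to read-only: capabilities() now advertises only BATCH_READ, so Spark's DataSource V2 capability checks will reject batch writes against it. A minimal usage sketch of the effect, assuming the Python data source is registered under a hypothetical short name "my_source" (not from this diff) and an active SparkSession named spark:

  // Batch reads still work, since the table advertises BATCH_READ:
  val df = spark.read.format("my_source").load()
  df.show()

  // A batch write would now fail Spark's table capability check,
  // because BATCH_WRITE is no longer in capabilities():
  // df.write.format("my_source").mode("append").save()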