File tree Expand file tree Collapse file tree 2 files changed +32
-2
lines changed
main/scala/org/apache/spark/sql/sources
test/scala/org/apache/spark/sql/sources Expand file tree Collapse file tree 2 files changed +32
-2
lines changed Original file line number Diff line number Diff line change @@ -198,7 +198,7 @@ object ResolvedDataSource {
198198           .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
199199           .createRelation(sqlContext, new CaseInsensitiveMap(options), schema)
200200 case _ =>
201-         sys.error(s"${clazz.getCanonicalName} should extend SchemaRelationProvider.")
201+         sys.error(s"${clazz.getCanonicalName} does not allow user-specified schemas.")
202202 }
203203 }
204204 case None => {
@@ -208,7 +208,7 @@ object ResolvedDataSource {
208208           .asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
209209           .createRelation(sqlContext, new CaseInsensitiveMap(options))
210210 case _ =>
211-         sys.error(s"${clazz.getCanonicalName} should extend RelationProvider.")
211+         sys.error(s"A schema needs to be specified when using ${clazz.getCanonicalName}.")
212212 }
213213 }
214214 }
Original file line number Diff line number Diff line change @@ -314,4 +314,34 @@ class TableScanSuite extends DataSourceTest {
314314 sql(" SELECT * FROM oneToTenDef" ),
315315 (1 to 10 ).map(Row (_)).toSeq)
316316 }
317+
318+ test(" exceptions" ) {
319+     // Make sure we throw the correct exception when users use a relation provider that
320+     // only implements RelationProvider or only implements SchemaRelationProvider.
321+ val schemaNotAllowed = intercept[Exception ] {
322+ sql(
323+ """
324+         |CREATE TEMPORARY TABLE relationProviderWithSchema (i int)
325+         |USING org.apache.spark.sql.sources.SimpleScanSource
326+ |OPTIONS (
327+ | From '1',
328+ | To '10'
329+ |)
330+ """ .stripMargin)
331+ }
332+     assert(schemaNotAllowed.getMessage.contains("does not allow user-specified schemas"))
333+
334+ val schemaNeeded = intercept[Exception ] {
335+ sql(
336+ """
337+         |CREATE TEMPORARY TABLE schemaRelationProviderWithoutSchema
338+         |USING org.apache.spark.sql.sources.AllDataTypesScanSource
339+ |OPTIONS (
340+ | From '1',
341+ | To '10'
342+ |)
343+ """ .stripMargin)
344+ }
345+     assert(schemaNeeded.getMessage.contains("A schema needs to be specified when using"))
346+ }
317347}
You can’t perform that action at this time.
0 commit comments