diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index a819cddcae98..ecb941c5fa9e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1228,10 +1228,12 @@ class SQLConf extends Serializable with Logging {
    * not set yet, return `defaultValue`.
    */
   def getConfString(key: String, defaultValue: String): String = {
-    val entry = sqlConfEntries.get(key)
-    if (entry != null && defaultValue != "<undefined>") {
-      // Only verify configs in the SQLConf object
-      entry.valueConverter(defaultValue)
+    if (defaultValue != null && defaultValue != "<undefined>") {
+      val entry = sqlConfEntries.get(key)
+      if (entry != null) {
+        // Only verify configs in the SQLConf object
+        entry.valueConverter(defaultValue)
+      }
     }
     Option(settings.get(key)).getOrElse(defaultValue)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index a283ff971adc..948f179f5e8f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -270,4 +270,15 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     val e2 = intercept[AnalysisException](spark.conf.unset(SCHEMA_STRING_LENGTH_THRESHOLD.key))
     assert(e2.message.contains("Cannot modify the value of a static config"))
   }
+
+  test("SPARK-21588 SQLContext.getConf(key, null) should return null") {
+    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, null))
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, "<undefined>"))
+    }
+
+    assert(spark.conf.getOption("spark.sql.nonexistent").isEmpty)
+    assert(null == spark.conf.get("spark.sql.nonexistent", null))
+    assert("<undefined>" == spark.conf.get("spark.sql.nonexistent", "<undefined>"))
+  }
 }