Commit ca59445

srowen authored and hvanhovell committed
[SPARK-21418][SQL] NoSuchElementException: None.get in DataSourceScanExec with sun.io.serialization.extendedDebugInfo=true
## What changes were proposed in this pull request?

If no SparkConf is available to Utils.redact, simply don't redact.

## How was this patch tested?

Existing tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #19123 from srowen/SPARK-21418.
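
To illustrate the pattern this patch adopts, here is a minimal, self-contained Scala sketch (not Spark source). `Conf`, `redact`, and `ReplacementText` below are hypothetical stand-ins for `SparkConf`, `Utils.redact`, `STRING_REDACTION_PATTERN`, and `REDACTION_REPLACEMENT_TEXT`. The point is that calling `.get` on an empty `Option` throws the `NoSuchElementException: None.get` from the title, while mapping to the value and falling back to `null` lets a null-tolerant redact helper return the input unchanged.

```scala
import scala.util.matching.Regex

// Hypothetical stand-in for SparkConf: carries an optional redaction regex.
final case class Conf(redactionPattern: Option[Regex])

object RedactSketch {
  private val ReplacementText = "*********(redacted)"

  // Mirrors the patched behavior: if no conf (or no pattern) is available, return the text as-is.
  def redact(conf: Conf, text: String): String = {
    if (text == null || text.isEmpty || conf == null || conf.redactionPattern.isEmpty) {
      text
    } else {
      conf.redactionPattern.get.replaceAllIn(text, ReplacementText)
    }
  }

  def main(args: Array[String]): Unit = {
    // Models SparkSession.getActiveSession returning None, e.g. when toString is
    // invoked during serialization debugging with no active session on the thread.
    val activeSessionConf: Option[Conf] = None

    // Before the patch: activeSessionConf.get would throw NoSuchElementException: None.get here.
    // After the patch: map/orNull hands redact() a null, which it now tolerates.
    val conf = activeSessionConf.orNull
    println(redact(conf, "url=jdbc:postgresql://host/db?password=secret")) // printed unchanged
    println(redact(Conf(Some("password=\\S+".r)), "password=secret"))      // *********(redacted)
  }
}
```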
1 parent 9f30d92 commit ca59445

2 files changed, +7 −4 lines


core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 6 additions & 3 deletions
@@ -2639,9 +2639,12 @@ private[spark] object Utils extends Logging {
    * Redact the sensitive information in the given string.
    */
   def redact(conf: SparkConf, text: String): String = {
-    if (text == null || text.isEmpty || !conf.contains(STRING_REDACTION_PATTERN)) return text
-    val regex = conf.get(STRING_REDACTION_PATTERN).get
-    regex.replaceAllIn(text, REDACTION_REPLACEMENT_TEXT)
+    if (text == null || text.isEmpty || conf == null || !conf.contains(STRING_REDACTION_PATTERN)) {
+      text
+    } else {
+      val regex = conf.get(STRING_REDACTION_PATTERN).get
+      regex.replaceAllIn(text, REDACTION_REPLACEMENT_TEXT)
+    }
   }

   private def redact(redactionPattern: Regex, kvs: Seq[(String, String)]): Seq[(String, String)] = {

sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ trait DataSourceScanExec extends LeafExecNode with CodegenSupport {
    * Shorthand for calling redactString() without specifying redacting rules
    */
   private def redact(text: String): String = {
-    Utils.redact(SparkSession.getActiveSession.get.sparkContext.conf, text)
+    Utils.redact(SparkSession.getActiveSession.map(_.sparkContext.conf).orNull, text)
   }
 }
