File tree: 2 files changed (+3, −4 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst — 2 files changed (+3, −4 lines)

@@ -24,9 +24,8 @@ import scala.util.control.Exception.allCatch
24  24   import org.apache.spark.rdd.RDD
25  25   import org.apache.spark.sql.catalyst.analysis.TypeCoercion
26  26   import org.apache.spark.sql.catalyst.expressions.ExprUtils
27      - import org.apache.spark.sql.catalyst.util.{DateFormatter, TimestampFormatter}
    27  + import org.apache.spark.sql.catalyst.util.{DateFormatter, LegacyFastDateFormatter, TimestampFormatter}
28  28   import org.apache.spark.sql.catalyst.util.LegacyDateFormats.FAST_DATE_FORMAT
29      - import org.apache.spark.sql.catalyst.util.LegacySimpleDateFormatter
30  29   import org.apache.spark.sql.errors.QueryExecutionErrors
31  30   import org.apache.spark.sql.types._
32  31
@@ -175,7 +174,7 @@ class CSVInferSchema(val options: CSVOptions) extends Serializable {
175 174
176 175    private def tryParseDateFormat(field: String): DataType = {
177 176      if (options.inferDateType
178     -      && !dateFormatter.isInstanceOf[LegacySimpleDateFormatter]
    177 +      && !dateFormatter.isInstanceOf[LegacyFastDateFormatter]
179 178        && (allCatch opt dateFormatter.parse(field)).isDefined) {
180 179        DateType
181 180      } else {
@@ -134,7 +134,7 @@ private[sql] class JsonInferSchema(options: JSONOptions) extends Serializable {
134 134      if (options.prefersDecimal && decimalTry.isDefined) {
135 135        decimalTry.get
136 136      } else if (options.inferDateType
137     -        && !dateFormatter.isInstanceOf[LegacySimpleDateFormatter] &&
    137 +        && !dateFormatter.isInstanceOf[LegacyFastDateFormatter] &&
138 138          (allCatch opt dateFormatter.parse(field)).isDefined) {
139 139        DateType
140 140      } else if (options.inferTimestamp &&
You can’t perform that action at this time.
0 commit comments