@@ -643,26 +643,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
     // Add table metadata such as table schema, partition columns, etc. to table properties.
     val updatedProperties = rawTable.properties ++ tableMetaToTableProps(rawTable, schema)
 
-    // Detect whether this is a Hive-compatible table.
-    val provider = rawTable.properties.get(DATASOURCE_PROVIDER)
-    val isHiveCompatible = if (provider.isDefined && provider != Some(DDLUtils.HIVE_PROVIDER)) {
-      rawTable.properties.get(DATASOURCE_HIVE_COMPATIBLE) match {
-        case Some(value) =>
-          value.toBoolean
-        case _ =>
-          // If the property is not set, the table may have been created by an old version
-          // of Spark. Detect Hive compatibility by comparing the table's serde with the
-          // serde for the table's data source. If they match, the table is Hive-compatible.
-          // If they don't, they're not, because of some other table property that made it
-          // not initially Hive-compatible.
-          HiveSerDe.sourceToSerDe(provider.get) == rawTable.storage.serde
-      }
-    } else {
-      // All non-DS tables are treated as regular Hive tables.
-      true
-    }
-
-    val updatedTable = if (isHiveCompatible) {
+    val updatedTable = if (isHiveCompatible(rawTable)) {
       val _updated = rawTable.copy(properties = updatedProperties, schema = schema)
       verifyColumnNames(_updated)
       _updated
@@ -1224,6 +1205,27 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
     client.listFunctions(db, pattern)
   }
 
+  /** Detect whether a table is stored with Hive-compatible metadata. */
+  private def isHiveCompatible(table: CatalogTable): Boolean = {
+    val provider = table.provider.orElse(table.properties.get(DATASOURCE_PROVIDER))
+    if (provider.isDefined && provider != Some(DDLUtils.HIVE_PROVIDER)) {
+      table.properties.get(DATASOURCE_HIVE_COMPATIBLE) match {
+        case Some(value) =>
+          value.toBoolean
+        case _ =>
+          // If the property is not set, the table may have been created by an old version
+          // of Spark. Detect Hive compatibility by comparing the table's serde with the
+          // serde for the table's data source. If they match, the table is Hive-compatible.
+          // If they don't, they're not, because of some other table property that made it
+          // not initially Hive-compatible.
+          HiveSerDe.sourceToSerDe(provider.get) == table.storage.serde
+      }
+    } else {
+      // All non-DS tables are treated as regular Hive tables.
+      true
+    }
+  }
+
 }
 
 object HiveExternalCatalog {
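
For reference, the decision that the extracted helper encodes can be restated as a small standalone sketch of its three cases. This is only an illustration, not part of the change: `isHiveCompatibleSketch` and `providerSerde` are hypothetical names, and `providerSerde` stands in for the `HiveSerDe.sourceToSerDe` lookup used in the real method.

object HiveCompatibilitySketch {
  // Illustrative restatement of the helper's three cases, assuming a caller-supplied
  // `providerSerde` lookup instead of HiveSerDe.sourceToSerDe.
  def isHiveCompatibleSketch(
      provider: Option[String],            // DATASOURCE_PROVIDER property, if any
      hiveCompatibleProp: Option[String],  // DATASOURCE_HIVE_COMPATIBLE property, if any
      tableSerde: Option[String],          // serde recorded in the table's storage descriptor
      providerSerde: String => Option[String]): Boolean = {
    if (provider.isDefined && provider != Some("hive")) {
      hiveCompatibleProp match {
        // Newer tables record compatibility explicitly in a table property.
        case Some(value) => value.toBoolean
        // Older tables: infer compatibility by comparing the data source's serde
        // with the serde stored for the table.
        case None => providerSerde(provider.get) == tableSerde
      }
    } else {
      // Tables without a data source provider are plain Hive tables.
      true
    }
  }
}

For example, a data source table created by an older Spark version with no explicit property is treated as Hive-compatible only when the serde registered for its provider matches the serde stored in its metastore entry.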