From 2b958c69e1c768f8982e8f9a59fe84bb116cf0b0 Mon Sep 17 00:00:00 2001
From: Alex Wiss-Wolferding
Date: Mon, 6 Feb 2023 10:20:23 +0800
Subject: [PATCH] [KYUUBI #4218] Using DB and table name when checking Delta table schema.

### _Why are the changes needed?_

To close #4218.

This change ensures BI tools can list columns on Delta Lake tables in all schemas.

### _How was this patch tested?_

- [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
- [x] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request

Closes #4219 from nousot-cloud-guy/feature/delta-db-schema.

Closes #4218

569843213 [Alex Wiss-Wolferding] Reversing match order in getColumnsByCatalog.
a6d973a3e [Alex Wiss-Wolferding] Revert "[KYUUBI #1458] Delta lake table columns won't show up in DBeaver."
20337dc96 [Alex Wiss-Wolferding] Revert "Using DB and table name when checking Delta table schema."
f7e4675a7 [Alex Wiss-Wolferding] Using DB and table name when checking Delta table schema.

Authored-by: Alex Wiss-Wolferding
Signed-off-by: Cheng Pan
---
 .../engine/spark/shim/CatalogShim_v2_4.scala |  8 +-------
 .../engine/spark/shim/CatalogShim_v3_0.scala | 16 ++++++++--------
 2 files changed, 9 insertions(+), 15 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
index 5977cd415b0..3478abc6639 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v2_4.scala
@@ -139,13 +139,7 @@ class CatalogShim_v2_4 extends SparkCatalogShim {
     databases.flatMap { db =>
       val identifiers = catalog.listTables(db, tablePattern, includeLocalTempViews = true)
       catalog.getTablesByName(identifiers).flatMap { t =>
-        val tableSchema =
-          if (t.provider.getOrElse("").equalsIgnoreCase("delta")) {
-            spark.table(t.identifier.table).schema
-          } else {
-            t.schema
-          }
-        tableSchema.zipWithIndex.filter(f => columnPattern.matcher(f._1.name).matches())
+        t.schema.zipWithIndex.filter(f => columnPattern.matcher(f._1.name).matches())
           .map { case (f, i) => toColumnResult(catalogName, t.database, t.identifier.table, f, i) }
       }
     }
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v3_0.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v3_0.scala
index d60f94ac755..50e641b59b2 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v3_0.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/shim/CatalogShim_v3_0.scala
@@ -188,14 +188,6 @@ class CatalogShim_v3_0 extends CatalogShim_v2_4 {
     val catalog = getCatalog(spark, catalogName)
 
     catalog match {
-      case builtin if builtin.name() == SESSION_CATALOG =>
-        super.getColumnsByCatalog(
-          spark,
-          SESSION_CATALOG,
-          schemaPattern,
-          tablePattern,
-          columnPattern)
-
       case tc: TableCatalog =>
         val namespaces = listNamespacesWithPattern(catalog, schemaPattern)
         val tp = tablePattern.r.pattern
@@ -210,6 +202,14 @@ class CatalogShim_v3_0 extends CatalogShim_v2_4 {
           table.schema.zipWithIndex.filter(f => columnPattern.matcher(f._1.name).matches())
             .map { case (f, i) => toColumnResult(tc.name(), namespace, tableName, f, i) }
         }
+
+      case builtin if builtin.name() == SESSION_CATALOG =>
+        super.getColumnsByCatalog(
+          spark,
+          SESSION_CATALOG,
+          schemaPattern,
+          tablePattern,
+          columnPattern)
     }
   }
 }
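
A minimal, self-contained sketch of why reversing the match order in `CatalogShim_v3_0.getColumnsByCatalog` changes behavior. The trait and object definitions below are hypothetical stand-ins, not Kyuubi or Spark classes: assuming the built-in session catalog also implements `TableCatalog` under DataSource V2, placing the `TableCatalog` case first routes it through the load-table-by-namespace path, which is what the PR description suggests lets BI tools list columns of Delta tables in any schema.

```scala
// Simplified, hypothetical stand-ins for Spark's CatalogPlugin / TableCatalog
// hierarchy, used only to illustrate the match-order change in the patch above.
trait CatalogPlugin { def name(): String }
trait TableCatalog extends CatalogPlugin

object MatchOrderSketch {
  val SESSION_CATALOG = "spark_catalog"

  // Assumption: the built-in session catalog is also a TableCatalog, so it can
  // satisfy either case of the match; the first matching case wins.
  object SessionCatalog extends TableCatalog {
    override def name(): String = SESSION_CATALOG
  }

  def columnsPath(catalog: CatalogPlugin): String = catalog match {
    // Patched order: the V2 TableCatalog path comes first and resolves tables
    // by namespace + table name, so schemas other than the default are covered.
    case _: TableCatalog => "V2 path: load tables per namespace"
    // Fallback for a plain session catalog: delegate to the V1 path.
    case builtin if builtin.name() == SESSION_CATALOG => "V1 path: session catalog metadata"
  }

  def main(args: Array[String]): Unit = {
    // Before the patch the SESSION_CATALOG guard was listed first and would
    // have claimed this catalog; with the reversed order it takes the V2 path.
    println(columnsPath(SessionCatalog))
  }
}
```

This mirrors only the shape of the patched match: in the real `CatalogShim_v3_0.scala`, the fallback case calls `super.getColumnsByCatalog` (the `CatalogShim_v2_4` path) and the `TableCatalog` case loads each table via `tc.loadTable` per namespace, as shown in the diff.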