@@ -1080,9 +1080,6 @@ class Analyzer(override val catalogManager: CatalogManager)
             }.getOrElse(write)
           case _ => write
         }
-
-      case u: UnresolvedV2Relation =>
-        CatalogV2Util.loadRelation(u.catalog, u.tableName).getOrElse(u)
     }
 
     /**
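For context, the rule removed in the hunk above resolved an UnresolvedV2Relation by loading its table from the catalog the node was already bound to. Below is a minimal sketch of that pattern as a standalone rule; the object name ResolveV2RelationSketch and the resolveOperators wiring are illustrative assumptions, not the actual Analyzer code.

import org.apache.spark.sql.catalyst.analysis.UnresolvedV2Relation
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.connector.catalog.CatalogV2Util

// Sketch only: resolve the v2-only placeholder by loading its table from the
// catalog it references; if the lookup fails, leave the node unresolved so the
// checks in CheckAnalysis (next hunk) can report the error to the user.
object ResolveV2RelationSketch extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    case u: UnresolvedV2Relation =>
      CatalogV2Util.loadRelation(u.catalog, u.tableName).getOrElse(u)
  }
}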
@@ -133,13 +133,6 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
         val tblName = write.table.asInstanceOf[UnresolvedRelation].multipartIdentifier
         write.table.failAnalysis(s"Table or view not found: ${tblName.quoted}")
 
-      case u: UnresolvedV2Relation if isView(u.originalNameParts) =>
-        u.failAnalysis(
-          s"Invalid command: '${u.originalNameParts.quoted}' is a view not a table.")
-
-      case u: UnresolvedV2Relation =>
-        u.failAnalysis(s"Table not found: ${u.originalNameParts.quoted}")
-
       case command: V2PartitionCommand =>
         command.table match {
           case r @ ResolvedTable(_, _, table, _) => table match {
@@ -25,7 +25,6 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, UnaryNode}
 import org.apache.spark.sql.catalyst.trees.TreePattern._
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.types.{DataType, Metadata, StructType}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
@@ -75,28 +74,6 @@ object UnresolvedRelation {
     UnresolvedRelation(tableIdentifier.database.toSeq :+ tableIdentifier.table)
 }
 
-/**
- * A variant of [[UnresolvedRelation]] which can only be resolved to a v2 relation
- * (`DataSourceV2Relation`), not v1 relation or temp view.
- *
- * @param originalNameParts the original table identifier name parts before catalog is resolved.
- * @param catalog The catalog which the table should be looked up from.
- * @param tableName The name of the table to look up.
- */
-case class UnresolvedV2Relation(
-    originalNameParts: Seq[String],
-    catalog: TableCatalog,
-    tableName: Identifier)
-  extends LeafNode with NamedRelation {
-  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
-
-  override def name: String = originalNameParts.quoted
-
-  override def output: Seq[Attribute] = Nil
-
-  override lazy val resolved = false
-}
-
 /**
  * An inline table that has not been resolved yet. Once resolved, it is turned by the analyzer into
  * a [[org.apache.spark.sql.catalyst.plans.logical.LocalRelation]].
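The node removed above carried an already-resolved TableCatalog and Identifier, so "loading the relation" amounted to fetching the v2 table and wrapping it in a DataSourceV2Relation. The sketch below illustrates that step under the assumption that it mirrors what CatalogV2Util.loadRelation did at the time; V2RelationLoaderSketch and loadV2Relation are hypothetical names, not the real implementation.

import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation

// Hypothetical helper: look up a v2 table by catalog + identifier and wrap it
// in a DataSourceV2Relation. A None result corresponds to the "Table not found"
// error that the removed CheckAnalysis cases used to raise.
object V2RelationLoaderSketch {
  def loadV2Relation(catalog: TableCatalog, ident: Identifier): Option[DataSourceV2Relation] = {
    try {
      Some(DataSourceV2Relation.create(catalog.loadTable(ident), Some(catalog), Some(ident)))
    } catch {
      case _: NoSuchTableException => None
    }
  }
}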