
Commit

Delete checks
osopardo1 authored and osopardo1 committed Dec 7, 2023
1 parent b48074e commit ea11e90
Showing 3 changed files with 0 additions and 12 deletions.
6 changes: 0 additions & 6 deletions src/main/scala/io/qbeast/spark/internal/QbeastOptions.scala
@@ -131,10 +131,4 @@ object QbeastOptions {
       }))
   }
 
-  def checkQbeastProperties(parameters: Map[String, String]): Unit = {
-    require(
-      parameters.contains("columnsToIndex") || parameters.contains("columnstoindex"),
-      throw AnalysisExceptionFactory.create("'columnsToIndex is not specified"))
-  }
-
 }
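
Note: for reference, a minimal, self-contained sketch of what the deleted helper did. This is an illustration, not project code: a plain IllegalArgumentException stands in for the AnalysisException produced by Spark's AnalysisExceptionFactory, and the object name is hypothetical.

    object ColumnsToIndexCheckSketch {

      // Mirrors the deleted checkQbeastProperties: fail unless the parameters
      // map carries a columnsToIndex key (either casing the original accepted).
      def checkQbeastProperties(parameters: Map[String, String]): Unit = {
        require(
          parameters.contains("columnsToIndex") || parameters.contains("columnstoindex"),
          // require's message argument is by-name, so this throw only runs on
          // the failure path; its exception is what the caller actually sees.
          throw new IllegalArgumentException("'columnsToIndex' is not specified"))
      }

      def main(args: Array[String]): Unit = {
        checkQbeastProperties(Map("columnsToIndex" -> "id")) // passes silently
        checkQbeastProperties(Map.empty) // throws IllegalArgumentException
      }
    }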
2 changes: 0 additions & 2 deletions src/main/scala/io/qbeast/spark/internal/sources/catalog/QbeastCatalog.scala
@@ -6,7 +6,6 @@ package io.qbeast.spark.internal.sources.catalog
 import io.qbeast.context.QbeastContext
 import io.qbeast.spark.internal.sources.v2.QbeastStagedTableImpl
 import io.qbeast.spark.internal.sources.v2.QbeastTableImpl
-import io.qbeast.spark.internal.QbeastOptions.checkQbeastProperties
 import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
@@ -109,7 +108,6 @@ class QbeastCatalog[T <: TableCatalog with SupportsNamespaces with FunctionCatalog]
       properties: util.Map[String, String]): Table = {
 
     if (QbeastCatalogUtils.isQbeastProvider(properties)) {
-      checkQbeastProperties(properties.asScala.toMap)
       // Create the table
       QbeastCatalogUtils.createQbeastTable(
         ident,
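
The branch above is the DDL entry point this commit changes: once isQbeastProvider accepts the properties, createQbeastTable now runs without the upfront columnsToIndex check. A sketch of the kind of statement that reaches this path, assuming qbeast-spark is on the classpath and the catalog is configured as in the project docs:

    import org.apache.spark.sql.SparkSession

    object CreateQbeastTableSketch {
      def main(args: Array[String]): Unit = {
        // The catalog class configured here is the one modified in this diff.
        val spark = SparkSession
          .builder()
          .master("local[*]")
          .config(
            "spark.sql.catalog.spark_catalog",
            "io.qbeast.spark.internal.sources.catalog.QbeastCatalog")
          .getOrCreate()

        // QbeastCatalog.createTable handles this DDL; after this commit the
        // columnsToIndex option is no longer pre-validated at that point.
        spark.sql("""
          CREATE TABLE students (id INT, name STRING)
          USING qbeast
          OPTIONS ('columnsToIndex' = 'id')
        """)
      }
    }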
4 changes: 0 additions & 4 deletions src/main/scala/io/qbeast/spark/internal/sources/v2/QbeastStagedTableImpl.scala
@@ -5,7 +5,6 @@ package io.qbeast.spark.internal.sources.v2
 
 import io.qbeast.spark.internal.sources.catalog.CreationMode
 import io.qbeast.spark.internal.sources.catalog.QbeastCatalogUtils
-import io.qbeast.spark.internal.QbeastOptions.checkQbeastProperties
 import io.qbeast.spark.table.IndexedTableFactory
 import org.apache.spark.sql.catalyst.catalog.SessionCatalog
 import org.apache.spark.sql.connector.catalog.Identifier
@@ -72,9 +71,6 @@ private[sources] class QbeastStagedTableImpl(
     // we pass all the writeOptions to the properties as well
     writeOptions.foreach { case (k, v) => props.put(k, v) }
 
-    // Check all the Qbeast properties are correctly specified
-    checkQbeastProperties(props.asScala.toMap)
-
     // Creates the corresponding table on the Catalog and executes
     // the writing of the dataFrame (if any)
     QbeastCatalogUtils.createQbeastTable(
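
The surviving context above folds the write options into the staged table's Java properties map before calling createQbeastTable; the deleted lines validated that merged map. A stand-alone sketch of the merge pattern (Scala 2.13 converters assumed; keys and values are illustrative):

    import java.util.{HashMap => JHashMap}
    import scala.jdk.CollectionConverters._

    object PropsMergeSketch {
      def main(args: Array[String]): Unit = {
        // Table properties live in a Java map, matching the TableCatalog API.
        val props = new JHashMap[String, String]()
        props.put("provider", "qbeast")

        // Write options arrive as a Scala map and are copied in, the same
        // pattern as the diff context; a repeated key overwrites the old value.
        val writeOptions = Map("columnsToIndex" -> "id", "cubeSize" -> "5000")
        writeOptions.foreach { case (k, v) => props.put(k, v) }

        // This merged view is what the deleted check used to validate.
        println(props.asScala.toMap)
      }
    }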
