diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala index 50ee6cd4085ea..ff8f9f3d82fd6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala @@ -18,6 +18,7 @@ package org.apache.spark.sql import org.apache.spark.annotation.InterfaceStability +import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan @@ -27,7 +28,7 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan * @since 1.3.0 */ @InterfaceStability.Stable -class AnalysisException protected[sql] ( +class AnalysisException( val message: String, val line: Option[Int] = None, val startPosition: Option[Int] = None, @@ -36,8 +37,19 @@ class AnalysisException protected[sql] ( val cause: Option[Throwable] = None) extends Exception(message, cause.orNull) with Serializable { + def withPlan(plan: LogicalPlan): AnalysisException = { + withPosition(plan.origin.line, plan.origin.startPosition, Option(plan)) + } + def withPosition(line: Option[Int], startPosition: Option[Int]): AnalysisException = { - val newException = new AnalysisException(message, line, startPosition) + withPosition(line, startPosition, None) + } + + private def withPosition( + line: Option[Int], + startPosition: Option[Int], + plan: Option[LogicalPlan]): AnalysisException = { + val newException = new AnalysisException(message, line, startPosition, plan) newException.setStackTrace(getStackTrace) newException } @@ -55,3 +67,113 @@ class AnalysisException protected[sql] ( s"$message;$lineAnnotation$positionAnnotation" } } + +object AnalysisException { + /** + * Create a no such database analysis exception. + */ + def noSuchDatabase(db: String): AnalysisException = { + new AnalysisException(s"Database '$db' not found") + } + + /** + * Create a database already exists analysis exception. + */ + def databaseAlreadyExists(db: String): AnalysisException = { + new AnalysisException(s"Database '$db' already exists") + } + + /** + * Create a no such table analysis exception. + */ + def noSuchTable(db: String, table: String): AnalysisException = { + new AnalysisException(s"Table or view '$table' not found in database '$db'") + } + + /** + * Create a table already exists analysis exception. + */ + def tableAlreadyExists(db: String, table: String): AnalysisException = { + new AnalysisException(s"Table or view '$table' already exists in database '$db'") + } + + /** + * Create a temporary table already exists analysis exception. + */ + def tempTableAlreadyExists(table: String): AnalysisException = { + new AnalysisException(s"Temporary table '$table' already exists") + } + + /** + * Create a no such partition analysis exception. + */ + def noSuchPartition(db: String, table: String, spec: TablePartitionSpec): AnalysisException = { + new AnalysisException( + s"Partition not found in table '$table' database '$db':\n" + spec.mkString("\n")) + } + + /** + * Create a partition already exists analysis exception. + */ + def partitionAlreadyExists( + db: String, + table: String, + spec: TablePartitionSpec): AnalysisException = { + new AnalysisException( + s"Partition already exists in table '$table' database '$db':\n" + spec.mkString("\n")) + } + + /** + * Create a no such partitions analysis exception. 
+ */ + def noSuchPartitions( + db: String, + table: String, + specs: Seq[TablePartitionSpec]): AnalysisException = { + new AnalysisException( + s"The following partitions not found in table '$table' database '$db':\n" + + specs.mkString("\n===\n")) + } + + /** + * Create a partitions already exists analysis exception. + */ + def partitionsAlreadyExists( + db: String, + table: String, + specs: Seq[TablePartitionSpec]): AnalysisException = { + new AnalysisException( + s"The following partitions already exists in table '$table' database '$db':\n" + + specs.mkString("\n===\n")) + } + + /** + * Create a no such function exception. + */ + def noSuchFunction(db: String, func: String): AnalysisException = { + new AnalysisException( + s"Undefined function: '$func'. This function is neither a registered temporary " + + s"function nor a permanent function registered in the database '$db'.") + } + + /** + * Create a function already exists analysis exception. + */ + def functionAlreadyExists(db: String, func: String): AnalysisException = { + new AnalysisException(s"Function '$func' already exists in database '$db'") + } + + /** + * Create a no such permanent function exception. + */ + def noSuchPermanentFunction(db: String, func: String): AnalysisException = { + new AnalysisException(s"Function '$func' not found in database '$db'") + } + + /** + * Create a no such temporary function exception. + */ + def noSuchTempFunction(func: String): AnalysisException = { + new AnalysisException(s"Temporary function '$func' not found") + } +} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala deleted file mode 100644 index 57f7a80bedc6c..0000000000000 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.sql.catalyst.analysis - -import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec - -/** - * Thrown by a catalog when an item already exists. The analyzer will rethrow the exception - * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information. 
- */ -class DatabaseAlreadyExistsException(db: String) - extends AnalysisException(s"Database '$db' already exists") - -class TableAlreadyExistsException(db: String, table: String) - extends AnalysisException(s"Table or view '$table' already exists in database '$db'") - -class TempTableAlreadyExistsException(table: String) - extends AnalysisException(s"Temporary table '$table' already exists") - -class PartitionAlreadyExistsException(db: String, table: String, spec: TablePartitionSpec) - extends AnalysisException( - s"Partition already exists in table '$table' database '$db':\n" + spec.mkString("\n")) - -class PartitionsAlreadyExistException(db: String, table: String, specs: Seq[TablePartitionSpec]) - extends AnalysisException( - s"The following partitions already exists in table '$table' database '$db':\n" - + specs.mkString("\n===\n")) - -class FunctionAlreadyExistsException(db: String, func: String) - extends AnalysisException(s"Function '$func' already exists in database '$db'") diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala index eafeb4ac1ae55..4c8cb833ce2d1 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala @@ -639,13 +639,7 @@ class Analyzer( try { catalog.lookupRelation(tableIdentWithDb) } catch { - case _: NoSuchTableException => - u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}") - // If the database is defined and that database is not found, throw an AnalysisException. - // Note that if the database is not defined, it is possible we are looking up a temp view. - case e: NoSuchDatabaseException => - u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}, the " + - s"database ${e.db} doesn't exsits.") + case a: AnalysisException => throw a.withPlan(u) } } @@ -1122,7 +1116,9 @@ class Analyzer( override def apply(plan: LogicalPlan): LogicalPlan = plan.transformAllExpressions { case f: UnresolvedFunction if !catalog.functionExists(f.name) => withPosition(f) { - throw new NoSuchFunctionException(f.name.database.getOrElse("default"), f.name.funcName) + throw AnalysisException.noSuchFunction( + f.name.database.getOrElse("default"), + f.name.funcName) } } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala deleted file mode 100644 index f5aae60431c15..0000000000000 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.spark.sql.catalyst.analysis - -import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec - - -/** - * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception - * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information. - */ -class NoSuchDatabaseException(val db: String) extends AnalysisException(s"Database '$db' not found") - -class NoSuchTableException(db: String, table: String) - extends AnalysisException(s"Table or view '$table' not found in database '$db'") - -class NoSuchPartitionException( - db: String, - table: String, - spec: TablePartitionSpec) - extends AnalysisException( - s"Partition not found in table '$table' database '$db':\n" + spec.mkString("\n")) - -class NoSuchPermanentFunctionException(db: String, func: String) - extends AnalysisException(s"Function '$func' not found in database '$db'") - -class NoSuchFunctionException(db: String, func: String) - extends AnalysisException( - s"Undefined function: '$func'. This function is neither a registered temporary function nor " + - s"a permanent function registered in the database '$db'.") - -class NoSuchPartitionsException(db: String, table: String, specs: Seq[TablePartitionSpec]) - extends AnalysisException( - s"The following partitions not found in table '$table' database '$db':\n" - + specs.mkString("\n===\n")) - -class NoSuchTempFunctionException(func: String) - extends AnalysisException(s"Temporary function '$func' not found") diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala index 08a01e8601897..d5429189a2919 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalyst.catalog -import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException, NoSuchTableException} +import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.types.StructType @@ -28,32 +28,32 @@ import org.apache.spark.sql.types.StructType * can be accessed in multiple threads. This is an external catalog because it is expected to * interact with external systems. * - * Implementations should throw [[NoSuchDatabaseException]] when databases don't exist. + * Implementations should throw an [[AnalysisException]] when databases don't exist. 
*/ abstract class ExternalCatalog { import CatalogTypes.TablePartitionSpec protected def requireDbExists(db: String): Unit = { if (!databaseExists(db)) { - throw new NoSuchDatabaseException(db) + throw AnalysisException.noSuchDatabase(db) } } protected def requireTableExists(db: String, table: String): Unit = { if (!tableExists(db, table)) { - throw new NoSuchTableException(db = db, table = table) + throw AnalysisException.noSuchTable(db, table) } } protected def requireFunctionExists(db: String, funcName: String): Unit = { if (!functionExists(db, funcName)) { - throw new NoSuchFunctionException(db = db, func = funcName) + throw AnalysisException.noSuchFunction(db, funcName) } } protected def requireFunctionNotExists(db: String, funcName: String): Unit = { if (functionExists(db, funcName)) { - throw new FunctionAlreadyExistsException(db = db, func = funcName) + throw AnalysisException.functionAlreadyExists(db, funcName) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala index 6095ac0bc9c50..c9cacbec9834b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/GlobalTempViewManager.scala @@ -22,7 +22,6 @@ import javax.annotation.concurrent.GuardedBy import scala.collection.mutable import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.analysis.TempTableAlreadyExistsException import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.catalyst.util.StringUtils @@ -58,7 +57,7 @@ class GlobalTempViewManager(val database: String) { viewDefinition: LogicalPlan, overrideIfExists: Boolean): Unit = synchronized { if (!overrideIfExists && viewDefinitions.contains(name)) { - throw new TempTableAlreadyExistsException(name) + throw AnalysisException.tempTableAlreadyExists(name) } viewDefinitions.put(name, viewDefinition) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala index 9ca1c71d1dcb1..719699ada51f6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala @@ -68,7 +68,7 @@ class InMemoryCatalog( private def requireTableNotExists(db: String, table: String): Unit = { if (tableExists(db, table)) { - throw new TableAlreadyExistsException(db = db, table = table) + throw AnalysisException.tableAlreadyExists(db, table) } } @@ -76,9 +76,9 @@ class InMemoryCatalog( db: String, table: String, specs: Seq[TablePartitionSpec]): Unit = { - specs.foreach { s => - if (!partitionExists(db, table, s)) { - throw new NoSuchPartitionException(db = db, table = table, spec = s) + specs.foreach { spec => + if (!partitionExists(db, table, spec)) { + throw AnalysisException.noSuchPartition(db, table, spec) } } } @@ -87,9 +87,9 @@ class InMemoryCatalog( db: String, table: String, specs: Seq[TablePartitionSpec]): Unit = { - specs.foreach { s => - if (partitionExists(db, table, s)) { - throw new PartitionAlreadyExistsException(db = db, table = table, spec = s) + specs.foreach { spec => + if (partitionExists(db, table, spec)) { + throw AnalysisException.partitionAlreadyExists(db, table, spec) } } } @@ -103,7 +103,7 @@ 
class InMemoryCatalog( ignoreIfExists: Boolean): Unit = synchronized { if (catalog.contains(dbDefinition.name)) { if (!ignoreIfExists) { - throw new DatabaseAlreadyExistsException(dbDefinition.name) + throw AnalysisException.databaseAlreadyExists(dbDefinition.name) } } else { try { @@ -147,7 +147,7 @@ class InMemoryCatalog( catalog.remove(db) } else { if (!ignoreIfNotExists) { - throw new NoSuchDatabaseException(db) + throw AnalysisException.noSuchDatabase(db) } } } @@ -189,7 +189,7 @@ class InMemoryCatalog( val table = tableDefinition.identifier.table if (tableExists(db, table)) { if (!ignoreIfExists) { - throw new TableAlreadyExistsException(db = db, table = table) + throw AnalysisException.tableAlreadyExists(db, table) } } else { // Set the default table location if this is a managed table and its location is not @@ -259,7 +259,7 @@ class InMemoryCatalog( catalog(db).tables.remove(table) } else { if (!ignoreIfNotExists) { - throw new NoSuchTableException(db = db, table = table) + throw AnalysisException.noSuchTable(db, table) } } } @@ -374,7 +374,7 @@ class InMemoryCatalog( if (!ignoreIfExists) { val dupSpecs = parts.collect { case p if existingParts.contains(p.spec) => p.spec } if (dupSpecs.nonEmpty) { - throw new PartitionsAlreadyExistException(db = db, table = table, specs = dupSpecs) + throw AnalysisException.partitionsAlreadyExists(db, table, dupSpecs) } } @@ -415,7 +415,7 @@ class InMemoryCatalog( if (!ignoreIfNotExists) { val missingSpecs = partSpecs.collect { case s if !existingParts.contains(s) => s } if (missingSpecs.nonEmpty) { - throw new NoSuchPartitionsException(db = db, table = table, specs = missingSpecs) + throw AnalysisException.noSuchPartitions(db, table, missingSpecs) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala index 6c6d600190b66..40e9e4189ab1b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala @@ -147,21 +147,21 @@ class SessionCatalog( private def requireDbExists(db: String): Unit = { if (!databaseExists(db)) { - throw new NoSuchDatabaseException(db) + throw AnalysisException.noSuchDatabase(db) } } private def requireTableExists(name: TableIdentifier): Unit = { if (!tableExists(name)) { val db = name.database.getOrElse(currentDb) - throw new NoSuchTableException(db = db, table = name.table) + throw AnalysisException.noSuchTable(db, name.table) } } private def requireTableNotExists(name: TableIdentifier): Unit = { if (tableExists(name)) { val db = name.database.getOrElse(currentDb) - throw new TableAlreadyExistsException(db = db, table = name.table) + throw AnalysisException.tableAlreadyExists(db, name.table) } } @@ -366,7 +366,7 @@ class SessionCatalog( /** * Retrieve the metadata of an existing permanent table/view. If no database is specified, * assume the table/view is in the current database. If the specified table/view is not found - * in the database then a [[NoSuchTableException]] is thrown. + * in the database then a [[AnalysisException]] is thrown. */ def getTableMetadata(name: TableIdentifier): CatalogTable = { val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase)) @@ -391,7 +391,7 @@ class SessionCatalog( /** * Load files stored in given path into an existing metastore table. * If no database is specified, assume the table is in the current database. 
- * If the specified table is not found in the database then a [[NoSuchTableException]] is thrown. + * If the specified table is not found in the database then a [[AnalysisException]] is thrown. */ def loadTable( name: TableIdentifier, @@ -408,7 +408,7 @@ class SessionCatalog( /** * Load files stored in given path into the partition of an existing metastore table. * If no database is specified, assume the table is in the current database. - * If the specified table is not found in the database then a [[NoSuchTableException]] is thrown. + * If the specified table is not found in the database then a [[AnalysisException]] is thrown. */ def loadPartition( name: TableIdentifier, @@ -446,7 +446,7 @@ class SessionCatalog( overrideIfExists: Boolean): Unit = synchronized { val table = formatTableName(name) if (tempTables.contains(table) && !overrideIfExists) { - throw new TempTableAlreadyExistsException(name) + throw AnalysisException.tempTableAlreadyExists(name) } tempTables.put(table, tableDefinition) } @@ -545,7 +545,7 @@ class SessionCatalog( tableType = CatalogTableType.VIEW, storage = CatalogStorageFormat.empty, schema = plan.output.toStructType) - }.getOrElse(throw new NoSuchTableException(globalTempViewManager.database, table)) + }.getOrElse(throw AnalysisException.noSuchTable(globalTempViewManager.database, table)) } else { getTableMetadata(name) } @@ -613,7 +613,7 @@ class SessionCatalog( if (db == globalTempViewManager.database) { val viewExists = globalTempViewManager.remove(table) if (!viewExists && !ignoreIfNotExists) { - throw new NoSuchTableException(globalTempViewManager.database, table) + throw AnalysisException.noSuchTable(globalTempViewManager.database, table) } } else { if (name.database.isDefined || !tempTables.contains(table)) { @@ -623,7 +623,7 @@ class SessionCatalog( if (tableExists(TableIdentifier(table, Option(db)))) { externalCatalog.dropTable(db, table, ignoreIfNotExists = true, purge = purge) } else if (!ignoreIfNotExists) { - throw new NoSuchTableException(db = db, table = table) + throw AnalysisException.noSuchTable(db, table) } } else { tempTables.remove(table) @@ -653,7 +653,7 @@ class SessionCatalog( if (db == globalTempViewManager.database) { globalTempViewManager.get(table).map { viewDef => SubqueryAlias(table, viewDef) - }.getOrElse(throw new NoSuchTableException(db, table)) + }.getOrElse(throw AnalysisException.noSuchTable(db, table)) } else if (name.database.isDefined || !tempTables.contains(table)) { val metadata = externalCatalog.getTable(db, table) if (metadata.tableType == CatalogTableType.VIEW) { @@ -993,7 +993,7 @@ class SessionCatalog( if (!functionExists(identifier)) { externalCatalog.createFunction(db, newFuncDefinition) } else if (!ignoreIfExists) { - throw new FunctionAlreadyExistsException(db = db, func = identifier.toString) + throw AnalysisException.functionAlreadyExists(db, identifier.toString) } } @@ -1016,7 +1016,7 @@ class SessionCatalog( } externalCatalog.dropFunction(db, name.funcName) } else if (!ignoreIfNotExists) { - throw new NoSuchFunctionException(db = db, func = identifier.toString) + throw AnalysisException.noSuchFunction(db, identifier.toString) } } @@ -1086,7 +1086,7 @@ class SessionCatalog( */ def dropTempFunction(name: String, ignoreIfNotExists: Boolean): Unit = { if (!functionRegistry.dropFunction(name) && !ignoreIfNotExists) { - throw new NoSuchTempFunctionException(name) + throw AnalysisException.noSuchTempFunction(name) } } @@ -1106,7 +1106,7 @@ class SessionCatalog( } protected def failFunctionLookup(name: String): 
Nothing = { - throw new NoSuchFunctionException(db = currentDb, func = name) + throw AnalysisException.noSuchFunction(currentDb, name) } /** @@ -1175,7 +1175,6 @@ class SessionCatalog( externalCatalog.getFunction(currentDb, name.funcName) } catch { case e: AnalysisException => failFunctionLookup(name.funcName) - case e: NoSuchPermanentFunctionException => failFunctionLookup(name.funcName) } loadFunctionResources(catalogFunction.resources) // Please note that qualifiedName is provided by the user. However, diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala index 42db4398e5072..5eae69414dd78 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala @@ -27,8 +27,6 @@ import org.scalatest.BeforeAndAfterEach import org.apache.spark.SparkFunSuite import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier} -import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException} -import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.types._ @@ -171,7 +169,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac val catalog = newBasicCatalog() assert(catalog.listTables("db2").toSet == Set("tbl1", "tbl2")) val table = newTable("tbl1", "db2") - intercept[TableAlreadyExistsException] { + intercept[AnalysisException] { catalog.createTable(table, ignoreIfExists = false) } } @@ -667,14 +665,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac test("create function when database does not exist") { val catalog = newBasicCatalog() - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createFunction("does_not_exist", newFunc()) } } test("create function that already exists") { val catalog = newBasicCatalog() - intercept[FunctionAlreadyExistsException] { + intercept[AnalysisException] { catalog.createFunction("db2", newFunc("func1")) } } @@ -688,14 +686,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac test("drop function when database does not exist") { val catalog = newBasicCatalog() - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropFunction("does_not_exist", "something") } } test("drop function that does not exist") { val catalog = newBasicCatalog() - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.dropFunction("db2", "does_not_exist") } } @@ -705,14 +703,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac assert(catalog.getFunction("db2", "func1") == CatalogFunction(FunctionIdentifier("func1", Some("db2")), funcClass, Seq.empty[FunctionResource])) - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.getFunction("db2", "does_not_exist") } } test("get function when database does not exist") { val catalog = newBasicCatalog() - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getFunction("does_not_exist", "func1") } } @@ -722,15 +720,15 @@ abstract class ExternalCatalogSuite 
extends SparkFunSuite with BeforeAndAfterEac val newName = "funcky" assert(catalog.getFunction("db2", "func1").className == funcClass) catalog.renameFunction("db2", "func1", newName) - intercept[NoSuchFunctionException] { catalog.getFunction("db2", "func1") } + intercept[AnalysisException] { catalog.getFunction("db2", "func1") } assert(catalog.getFunction("db2", newName).identifier.funcName == newName) assert(catalog.getFunction("db2", newName).className == funcClass) - intercept[NoSuchFunctionException] { catalog.renameFunction("db2", "does_not_exist", "me") } + intercept[AnalysisException] { catalog.renameFunction("db2", "does_not_exist", "me") } } test("rename function when database does not exist") { val catalog = newBasicCatalog() - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.renameFunction("does_not_exist", "func1", "func5") } } @@ -738,7 +736,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac test("rename function when new function already exists") { val catalog = newBasicCatalog() catalog.createFunction("db2", newFunc("func2", Some("db2"))) - intercept[FunctionAlreadyExistsException] { + intercept[AnalysisException] { catalog.renameFunction("db2", "func1", "func2") } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala index be8903000a0d1..f1dee2040213e 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala @@ -116,7 +116,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("get database should throw exception when the database does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getDatabaseMetadata("db_that_does_not_exist") } } @@ -178,7 +178,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(e.contains( "org.apache.hadoop.hive.metastore.api.NoSuchObjectException: db_that_does_not_exist")) } else { - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false) } } @@ -191,7 +191,7 @@ abstract class SessionCatalogSuite extends PlanTest { catalog.setCurrentDatabase("db1") assert(catalog.getCurrentDatabase == "db1") catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true) - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false) } catalog.setCurrentDatabase("default") @@ -217,7 +217,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("alter database should throw exception when the database does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.alterDatabase(newDb("unknown_db")) } } @@ -228,7 +228,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.getCurrentDatabase == "default") catalog.setCurrentDatabase("db2") assert(catalog.getCurrentDatabase == "db2") - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.setCurrentDatabase("deebo") } catalog.createDatabase(newDb("deebo"), ignoreIfExists = false) @@ -266,14 +266,14 @@ abstract class SessionCatalogSuite extends PlanTest { test("create table when 
database does not exist") { withBasicCatalog { catalog => // Creating table in non-existent database should always fail - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = false) } - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = true) } // Table already exists - intercept[TableAlreadyExistsException] { + intercept[AnalysisException] { catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false) } catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = true) @@ -290,7 +290,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.getTempView("tbl2") == Option(tempTable2)) assert(catalog.getTempView("tbl3").isEmpty) // Temporary table already exists - intercept[TempTableAlreadyExistsException] { + intercept[AnalysisException] { catalog.createTempView("tbl1", tempTable1, overrideIfExists = false) } // Temporary table already exists but we override it @@ -315,15 +315,15 @@ abstract class SessionCatalogSuite extends PlanTest { test("drop table when database/table does not exist") { withBasicCatalog { catalog => // Should always throw exception when the database does not exist - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = false, purge = false) } - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = true, purge = false) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.dropTable(TableIdentifier("unknown_table", Some("db2")), ignoreIfNotExists = false, purge = false) } @@ -373,7 +373,7 @@ abstract class SessionCatalogSuite extends PlanTest { TableIdentifier("tblone", Some("db2")), TableIdentifier("tblones", Some("db1"))) } // The new table already exists - intercept[TableAlreadyExistsException] { + intercept[AnalysisException] { catalog.renameTable( TableIdentifier("tblone", Some("db2")), TableIdentifier("table_two")) @@ -390,10 +390,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("rename table when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.renameTable(TableIdentifier("tbl1", Some("unknown_db")), TableIdentifier("tbl2")) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.renameTable(TableIdentifier("unknown_table", Some("db2")), TableIdentifier("tbl2")) } } @@ -439,10 +439,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("alter table when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.alterTable(newTable("tbl1", "unknown_db")) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.alterTable(newTable("unknown_table", "db2")) } } @@ -489,10 +489,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("get table when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getTableMetadata(TableIdentifier("tbl1", Some("unknown_db"))) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.getTableMetadata(TableIdentifier("unknown_table", Some("db2"))) } 
} @@ -503,7 +503,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("db2"))) == Option(catalog.externalCatalog.getTable("db2", "tbl1"))) assert(catalog.getTableMetadataOption(TableIdentifier("unknown_table", Some("db2"))).isEmpty) - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("unknown_db"))) } } @@ -569,11 +569,11 @@ abstract class SessionCatalogSuite extends PlanTest { test("getTempViewOrPermanentTableMetadata on temporary views") { withBasicCatalog { catalog => val tempTable = Range(1, 10, 2, 10) - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1")) }.getMessage - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1", Some("default"))) }.getMessage @@ -583,7 +583,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.getTempViewOrPermanentTableMetadata( TableIdentifier("view1")).schema(0).name == "id") - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.getTempViewOrPermanentTableMetadata(TableIdentifier("view1", Some("default"))) }.getMessage } @@ -601,7 +601,7 @@ abstract class SessionCatalogSuite extends PlanTest { TableIdentifier("tbl4"), TableIdentifier("tbl1", Some("db2")), TableIdentifier("tbl2", Some("db2")))) - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.listTables("unknown_db") } } @@ -621,7 +621,7 @@ abstract class SessionCatalogSuite extends PlanTest { TableIdentifier("tbl2", Some("db2")))) assert(catalog.listTables("db2", "*1").toSet == Set(TableIdentifier("tbl1"), TableIdentifier("tbl1", Some("db2")))) - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.listTables("unknown_db", "*") } } @@ -650,11 +650,11 @@ abstract class SessionCatalogSuite extends PlanTest { test("create partitions when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createPartitions( TableIdentifier("tbl1", Some("unknown_db")), Seq(), ignoreIfExists = false) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.createPartitions( TableIdentifier("does_not_exist", Some("db2")), Seq(), ignoreIfExists = false) } @@ -743,7 +743,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("drop partitions when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropPartitions( TableIdentifier("tbl1", Some("unknown_db")), Seq(), @@ -751,7 +751,7 @@ abstract class SessionCatalogSuite extends PlanTest { purge = false, retainData = false) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.dropPartitions( TableIdentifier("does_not_exist", Some("db2")), Seq(), @@ -837,10 +837,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("get partition when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getPartition(TableIdentifier("tbl1", Some("unknown_db")), part1.spec) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.getPartition(TableIdentifier("does_not_exist", Some("db2")), part1.spec) } } @@ -904,11 +904,11 @@ 
abstract class SessionCatalogSuite extends PlanTest { test("rename partitions when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.renamePartitions( TableIdentifier("tbl1", Some("unknown_db")), Seq(part1.spec), Seq(part2.spec)) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.renamePartitions( TableIdentifier("does_not_exist", Some("db2")), Seq(part1.spec), Seq(part2.spec)) } @@ -981,10 +981,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("alter partitions when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.alterPartitions(TableIdentifier("tbl1", Some("unknown_db")), Seq(part1)) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.alterPartitions(TableIdentifier("does_not_exist", Some("db2")), Seq(part1)) } } @@ -1097,10 +1097,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("list partitions when database/table does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.listPartitions(TableIdentifier("tbl1", Some("unknown_db"))) } - intercept[NoSuchTableException] { + intercept[AnalysisException] { catalog.listPartitions(TableIdentifier("does_not_exist", Some("db2"))) } } @@ -1142,7 +1142,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("create function when database does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.createFunction( newFunc("func5", Some("does_not_exist")), ignoreIfExists = false) } @@ -1151,7 +1151,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("create function that already exists") { withBasicCatalog { catalog => - intercept[FunctionAlreadyExistsException] { + intercept[AnalysisException] { catalog.createFunction(newFunc("func1", Some("db2")), ignoreIfExists = false) } catalog.createFunction(newFunc("func1", Some("db2")), ignoreIfExists = true) @@ -1170,7 +1170,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.lookupFunction(FunctionIdentifier("temp1"), arguments) === Literal(1)) assert(catalog.lookupFunction(FunctionIdentifier("temp2"), arguments) === Literal(3)) // Temporary function does not exist. 
- intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.lookupFunction(FunctionIdentifier("temp3"), arguments) } val tempFunc3 = (e: Seq[Expression]) => Literal(e.size) @@ -1232,11 +1232,11 @@ abstract class SessionCatalogSuite extends PlanTest { test("drop function when database/function does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.dropFunction( FunctionIdentifier("something", Some("unknown_db")), ignoreIfNotExists = false) } - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.dropFunction(FunctionIdentifier("does_not_exist"), ignoreIfNotExists = false) } catalog.dropFunction(FunctionIdentifier("does_not_exist"), ignoreIfNotExists = true) @@ -1251,10 +1251,10 @@ abstract class SessionCatalogSuite extends PlanTest { val arguments = Seq(Literal(1), Literal(2), Literal(3)) assert(catalog.lookupFunction(FunctionIdentifier("func1"), arguments) === Literal(1)) catalog.dropTempFunction("func1", ignoreIfNotExists = false) - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.lookupFunction(FunctionIdentifier("func1"), arguments) } - intercept[NoSuchTempFunctionException] { + intercept[AnalysisException] { catalog.dropTempFunction("func1", ignoreIfNotExists = false) } catalog.dropTempFunction("func1", ignoreIfNotExists = true) @@ -1275,10 +1275,10 @@ abstract class SessionCatalogSuite extends PlanTest { test("get function when database/function does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.getFunctionMetadata(FunctionIdentifier("func1", Some("unknown_db"))) } - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.getFunctionMetadata(FunctionIdentifier("does_not_exist", Some("db2"))) } } @@ -1292,7 +1292,7 @@ abstract class SessionCatalogSuite extends PlanTest { assert(catalog.lookupFunction( FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3))) == Literal(1)) catalog.dropTempFunction("func1", ignoreIfNotExists = false) - intercept[NoSuchFunctionException] { + intercept[AnalysisException] { catalog.lookupFunction(FunctionIdentifier("func1"), Seq(Literal(1), Literal(2), Literal(3))) } } @@ -1326,7 +1326,7 @@ abstract class SessionCatalogSuite extends PlanTest { test("list functions when database does not exist") { withBasicCatalog { catalog => - intercept[NoSuchDatabaseException] { + intercept[AnalysisException] { catalog.listFunctions("unknown_db", "func*") } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala index 336f14dd97aea..f1e7c54f020de 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala @@ -17,9 +17,8 @@ package org.apache.spark.sql.execution.command -import org.apache.spark.sql.{Dataset, Row, SparkSession} +import org.apache.spark.sql.{AnalysisException, Dataset, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.catalyst.plans.QueryPlan import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan @@ -59,7 +58,7 @@ case class UncacheTableCommand( try { sparkSession.catalog.uncacheTable(tableId) } catch { - case _: NoSuchTableException if ifExists => // don't throw 
+ case _: AnalysisException if ifExists => // don't throw } Seq.empty[Row] } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 55540563ef911..4552af5609366 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -30,7 +30,7 @@ import org.apache.hadoop.mapred.{FileInputFormat, JobConf} import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.{NoSuchTableException, Resolver} +import org.apache.spark.sql.catalyst.analysis.Resolver import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference} @@ -203,7 +203,7 @@ case class DropTableCommand( try { sparkSession.sharedState.cacheManager.uncacheQuery(sparkSession.table(tableName)) } catch { - case _: NoSuchTableException if ifExists => + case _: AnalysisException if ifExists => case NonFatal(e) => log.warn(e.toString, e) } catalog.refreshTable(tableName) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala index 545082324f0d3..a9387a3e88b6e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala @@ -21,9 +21,9 @@ import java.util.Locale import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.FunctionIdentifier -import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchFunctionException} +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, FunctionResource} -import org.apache.spark.sql.catalyst.expressions.{Attribute, ExpressionInfo} +import org.apache.spark.sql.catalyst.expressions.Attribute import org.apache.spark.sql.types.{StringType, StructField, StructType} @@ -119,7 +119,7 @@ case class DescribeFunctionCommand( "When `expr1` = `expr2`, returns `expr3`; " + "when `expr1` = `expr4`, return `expr5`; else return `expr6`.") :: Nil case _ => - try { + if (sparkSession.sessionState.catalog.functionExists(functionName)) { val info = sparkSession.sessionState.catalog.lookupFunctionInfo(functionName) val name = if (info.getDb != null) info.getDb + "." 
+ info.getName else info.getName val result = @@ -133,8 +133,8 @@ case class DescribeFunctionCommand( } else { result } - } catch { - case _: NoSuchFunctionException => Seq(Row(s"Function: $functionName not found.")) + } else { + Seq(Row(s"Function: $functionName not found.")) } } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index ebf03e1bf8869..c237f0718058f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -20,24 +20,21 @@ package org.apache.spark.sql.execution.command import java.io.File import java.net.URI import java.nio.file.FileSystems -import java.util.Date import scala.collection.mutable.ArrayBuffer import scala.util.control.NonFatal import scala.util.Try -import org.apache.commons.lang3.StringEscapeUtils import org.apache.hadoop.fs.Path import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.catalog.CatalogTableType._ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference} import org.apache.spark.sql.catalyst.util.quoteIdentifier -import org.apache.spark.sql.execution.datasources.{DataSource, FileFormat, PartitioningUtils} +import org.apache.spark.sql.execution.datasources.{DataSource, PartitioningUtils} import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat import org.apache.spark.sql.execution.datasources.json.JsonFileFormat import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat @@ -456,7 +453,7 @@ case class TruncateTableCommand( // Fail if the partition spec is fully specified (not partial) and the partition does not // exist. 
for (spec <- partitionSpec if partLocations.isEmpty && spec.size == partCols.length) { - throw new NoSuchPartitionException(table.database, table.identifier.table, spec) + throw AnalysisException.noSuchPartition(table.database, table.identifier.table, spec) } partLocations diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out index 678a3f0f0a3c6..0aaebbd1db009 100644 --- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out @@ -256,7 +256,7 @@ ALTER TABLE temp_view CHANGE a a INT COMMENT 'this is column a' -- !query 25 schema struct<> -- !query 25 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'temp_view' not found in database 'default'; @@ -273,7 +273,7 @@ ALTER TABLE global_temp.global_temp_view CHANGE a a INT COMMENT 'this is column -- !query 27 schema struct<> -- !query 27 output -org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException +org.apache.spark.sql.AnalysisException Database 'global_temp' not found; diff --git a/sql/core/src/test/resources/sql-tests/results/cte.sql.out b/sql/core/src/test/resources/sql-tests/results/cte.sql.out index a446c2cd183da..baf60d32c9eaa 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte.sql.out @@ -24,7 +24,7 @@ WITH s AS (SELECT 1 FROM s) SELECT * FROM s struct<> -- !query 2 output org.apache.spark.sql.AnalysisException -Table or view not found: s; line 1 pos 25 +Table or view 's' not found in database 'default'; line 1 pos 25 -- !query 3 @@ -43,7 +43,7 @@ WITH s1 AS (SELECT 1 FROM s2), s2 AS (SELECT 1 FROM s1) SELECT * FROM s1, s2 struct<> -- !query 4 output org.apache.spark.sql.AnalysisException -Table or view not found: s2; line 1 pos 26 +Table or view 's2' not found in database 'default'; line 1 pos 26 -- !query 5 diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out index de10b29f3c65b..de401e25e0d0d 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out @@ -234,7 +234,7 @@ DESC t PARTITION (c='Us', d=2) -- !query 13 schema struct<> -- !query 13 output -org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException +org.apache.spark.sql.AnalysisException Partition not found in table 't' database 'default': c -> Us d -> 2; diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index 8f2a54f7c24e2..423b2edae858b 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -196,7 +196,7 @@ SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1) -- !query 16 schema struct<> -- !query 16 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'show_t*' not found in database 'showdb'; @@ -223,7 +223,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1) -- !query 19 schema struct<> -- !query 19 output -org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException +org.apache.spark.sql.AnalysisException Partition not found in table 'show_t1' database 'showdb': c -> Ch d -> 1; diff 
--git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index 05c3a083ee3b3..92e01ac067a88 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -93,7 +93,7 @@ SHOW COLUMNS IN badtable FROM showdb -- !query 10 schema struct<> -- !query 10 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'badtable' not found in database 'showdb'; @@ -129,7 +129,7 @@ SHOW COLUMNS IN showdb.showcolumn3 -- !query 14 schema struct<> -- !query 14 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'showcolumn3' not found in database 'showdb'; @@ -138,7 +138,7 @@ SHOW COLUMNS IN showcolumn3 FROM showdb -- !query 15 schema struct<> -- !query 15 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'showcolumn3' not found in database 'showdb'; @@ -147,7 +147,7 @@ SHOW COLUMNS IN showcolumn4 -- !query 16 schema struct<> -- !query 16 output -org.apache.spark.sql.catalyst.analysis.NoSuchTableException +org.apache.spark.sql.AnalysisException Table or view 'showcolumn4' not found in database 'showdb'; diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index 0dd9296a3f0ff..3b3f8b7878120 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -1651,7 +1651,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { var e = intercept[AnalysisException] { sql("select * from in_valid_table") } - assert(e.message.contains("Table or view not found")) + assert(e.message.contains("Table or view 'in_valid_table' not found")) e = intercept[AnalysisException] { sql("select * from no_db.no_table").show() diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala index 5c63c6a414f93..4f7716bdfe9c8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala @@ -20,7 +20,6 @@ package org.apache.spark.sql.execution import org.apache.spark.sql.{AnalysisException, QueryTest, Row} import org.apache.spark.sql.catalog.Table import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.test.SharedSQLContext import org.apache.spark.sql.types.StructType @@ -39,18 +38,18 @@ class GlobalTempViewSuite extends QueryTest with SharedSQLContext { // If there is no database in table name, we should try local temp view first, if not found, // try table/view in current database, which is "default" in this case. So we expect - // NoSuchTableException here. - intercept[NoSuchTableException](spark.table("src")) + // AnalysisException here. + intercept[AnalysisException](spark.table("src")) // Use qualified name to refer to the global temp view explicitly. checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a")) // Table name without database will never refer to a global temp view. 
- intercept[NoSuchTableException](sql("DROP VIEW src")) + intercept[AnalysisException](sql("DROP VIEW src")) sql(s"DROP VIEW $globalTempDB.src") // The global temp view should be dropped successfully. - intercept[NoSuchTableException](spark.table(s"$globalTempDB.src")) + intercept[AnalysisException](spark.table(s"$globalTempDB.src")) // We can also use Dataset API to create global temp view Seq(1 -> "a").toDF("i", "j").createGlobalTempView("src") @@ -58,7 +57,7 @@ class GlobalTempViewSuite extends QueryTest with SharedSQLContext { // Use qualified name to rename a global temp view. sql(s"ALTER VIEW $globalTempDB.src RENAME TO src2") - intercept[NoSuchTableException](spark.table(s"$globalTempDB.src")) + intercept[AnalysisException](spark.table(s"$globalTempDB.src")) checkAnswer(spark.table(s"$globalTempDB.src2"), Row(1, "a")) // Use qualified name to alter a global temp view. @@ -67,7 +66,7 @@ class GlobalTempViewSuite extends QueryTest with SharedSQLContext { // We can also use Catalog API to drop global temp view spark.catalog.dropGlobalTempView("src2") - intercept[NoSuchTableException](spark.table(s"$globalTempDB.src2")) + intercept[AnalysisException](spark.table(s"$globalTempDB.src2")) } test("global temp view is shared among all sessions") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index d32716c18ddfb..a25b1f74df0e8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -19,7 +19,6 @@ package org.apache.spark.sql.execution import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.test.{SharedSQLContext, SQLTestUtils} class SimpleSQLViewSuite extends SQLViewSuite with SharedSQLContext @@ -160,7 +159,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } private def assertNoSuchTable(query: String): Unit = { - intercept[NoSuchTableException] { + intercept[AnalysisException] { sql(query) } } @@ -196,16 +195,16 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { assertInvalidReference("CREATE OR REPLACE VIEW myabcdview AS SELECT * FROM table_not_exist345") // A column that does not exist - intercept[AnalysisException] { - sql("CREATE OR REPLACE VIEW myabcdview AS SELECT random1234 FROM jt").collect() - } + assertInvalid("CREATE OR REPLACE VIEW myabcdview AS SELECT random1234 FROM jt") } private def assertInvalidReference(query: String): Unit = { - val e = intercept[AnalysisException] { - sql(query) - }.getMessage - assert(e.contains("Table or view not found")) + assertInvalid(query, "Table or view", "not found") + } + + private def assertInvalid(query: String, msgs: String*): Unit = { + val e = intercept[AnalysisException](sql(query)).getMessage + msgs.foreach(msg => assert(e.contains(msg))) } @@ -529,7 +528,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } } } - assertInvalidReference("SELECT * FROM view1") + assertInvalid("SELECT * FROM view1", "Database", "not found") // Fail if the referenced table is invalid. 
     withTable("table2") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 2f4eb1b15519b..33f88daec395a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -26,7 +26,7 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchPartitionException, NoSuchTableException, TempTableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.internal.SQLConf
@@ -702,7 +702,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       Row("1997", "Ford") :: Nil)
     // Fails if creating a new view with the same name
-    intercept[TempTableAlreadyExistsException] {
+    intercept[AnalysisException] {
       sql(
         s"""
            |CREATE TEMPORARY VIEW testview
@@ -793,10 +793,10 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       spark.range(10).createOrReplaceTempView("tab1")
       sql("ALTER TABLE tab1 RENAME TO tab2")
       checkAnswer(spark.table("tab2"), spark.range(10).toDF())
-      intercept[NoSuchTableException] { spark.table("tab1") }
+      intercept[AnalysisException] { spark.table("tab1") }
       sql("ALTER VIEW tab2 RENAME TO tab1")
       checkAnswer(spark.table("tab1"), spark.range(10).toDF())
-      intercept[NoSuchTableException] { spark.table("tab2") }
+      intercept[AnalysisException] { spark.table("tab2") }
     }
   }
@@ -1435,12 +1435,12 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       Set(Map("a" -> "10", "b" -> "p"), Map("a" -> "20", "b" -> "c"), Map("a" -> "3", "b" -> "p")))
     // table to alter does not exist
-    intercept[NoSuchTableException] {
+    intercept[AnalysisException] {
       sql("ALTER TABLE does_not_exist PARTITION (c='3') RENAME TO PARTITION (c='333')")
     }
     // partition to rename does not exist
-    intercept[NoSuchPartitionException] {
+    intercept[AnalysisException] {
       sql("ALTER TABLE tab1 PARTITION (a='not_found', b='1') RENAME TO PARTITION (a='1', b='2')")
     }
@@ -1679,7 +1679,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     assert(spark.table("partTable").count() == data.count())
     // throw exception if no partition is matched for the given non-partial partition spec.
-    intercept[NoSuchPartitionException] {
+    intercept[AnalysisException] {
       sql("TRUNCATE TABLE partTable PARTITION (width=100, length=100)")
     }
@@ -1717,7 +1717,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
   test("block creating duplicate temp table") {
     withView("t_temp") {
       sql("CREATE TEMPORARY VIEW t_temp AS SELECT 1, 2")
-      val e = intercept[TempTableAlreadyExistsException] {
+      val e = intercept[AnalysisException] {
         sql("CREATE TEMPORARY TABLE t_temp (c3 int, c4 string) USING JSON")
       }.getMessage
       assert(e.contains("Temporary table 't_temp' already exists"))
@@ -1732,7 +1732,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       (1 to 10).map { i => (i, i) }.toDF("a", "b").createTempView("my_temp_tab")
       sql(s"CREATE TABLE my_ext_tab using parquet LOCATION '${tempDir.toURI}'")
       sql(s"CREATE VIEW my_view AS SELECT 1")
-      intercept[NoSuchTableException] {
+      intercept[AnalysisException] {
         sql("TRUNCATE TABLE my_temp_tab")
       }
       assertUnsupported("TRUNCATE TABLE my_ext_tab")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index 44c0fc70d066b..c3296c4e95a56 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -32,7 +32,6 @@ import org.scalatest.concurrent.Eventually
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.catalog.SessionCatalog.DEFAULT_DATABASE
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
@@ -193,7 +192,7 @@ private[sql] trait SQLTestUtils
       // If the test failed part way, we don't want to mask the failure by failing to remove
       // temp tables that never got created.
       try tableNames.foreach(spark.catalog.dropTempView) catch {
-        case _: NoSuchTableException =>
+        case _: AnalysisException =>
       }
     }
   }
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index d3cec11bd7567..a09b3330b12c6 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -232,7 +232,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
     runCliWithin(timeout = 2.minute, errorResponses = Seq("AnalysisException"))(
       "select * from nonexistent_table;"
-        -> "Error in query: Table or view not found: nonexistent_table;"
+        -> "Error in query: Table or view 'nonexistent_table' not found"
     )
   }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 8b0fdf49cefab..df45ccdc3b508 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -34,7 +34,6 @@ import org.apache.spark.{SparkConf, SparkException}
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils._
 import org.apache.spark.sql.catalyst.expressions._
@@ -204,7 +203,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
     verifyTableProperties(tableDefinition)
     if (tableExists(db, table) && !ignoreIfExists) {
-      throw new TableAlreadyExistsException(db = db, table = table)
+      throw AnalysisException.tableAlreadyExists(db, table)
     }
     if (tableDefinition.tableType == VIEW) {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
index 16a80f9fff452..fb9afdff1d50b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.client
 import java.io.PrintStream
-import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -69,9 +69,11 @@ private[hive] trait HiveClient {
   /** Return whether a table/view with the specified name exists. */
   def tableExists(dbName: String, tableName: String): Boolean
-  /** Returns the specified table, or throws [[NoSuchTableException]]. */
+  /** Returns the specified table, or throws [[AnalysisException]]. */
   final def getTable(dbName: String, tableName: String): CatalogTable = {
-    getTableOption(dbName, tableName).getOrElse(throw new NoSuchTableException(dbName, tableName))
+    getTableOption(dbName, tableName).getOrElse {
+      throw AnalysisException.noSuchTable(dbName, tableName)
+    }
   }
   /** Returns the metadata for the specified table or None if it doesn't exist. */
@@ -144,13 +146,13 @@ private[hive] trait HiveClient {
       table: String,
       newParts: Seq[CatalogTablePartition]): Unit
-  /** Returns the specified partition, or throws [[NoSuchPartitionException]]. */
+  /** Returns the specified partition, or throws [[AnalysisException]]. */
   final def getPartition(
       dbName: String,
       tableName: String,
       spec: TablePartitionSpec): CatalogTablePartition = {
     getPartitionOption(dbName, tableName, spec).getOrElse {
-      throw new NoSuchPartitionException(dbName, tableName, spec)
+      throw AnalysisException.noSuchPartition(dbName, tableName, spec)
     }
   }
@@ -241,7 +243,7 @@ private[hive] trait HiveClient {
   /** Return an existing function in the database, assuming it exists. */
   final def getFunction(db: String, name: String): CatalogFunction = {
-    getFunctionOption(db, name).getOrElse(throw new NoSuchPermanentFunctionException(db, name))
+    getFunctionOption(db, name).getOrElse(throw AnalysisException.noSuchPermanentFunction(db, name))
   }
   /** Return an existing function in the database, or None if it doesn't exist. */
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 387ec4f967233..1dc48abee55aa 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -41,7 +41,6 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -312,7 +311,7 @@ private[hive] class HiveClientImpl(
     if (databaseExists(databaseName)) {
       state.setCurrentDatabase(databaseName)
     } else {
-      throw new NoSuchDatabaseException(databaseName)
+      throw AnalysisException.noSuchDatabase(databaseName)
     }
   }
@@ -352,7 +351,7 @@ private[hive] class HiveClientImpl(
       description = d.getDescription,
       locationUri = CatalogUtils.stringToURI(d.getLocationUri),
       properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
-    }.getOrElse(throw new NoSuchDatabaseException(dbName))
+    }.getOrElse(throw AnalysisException.noSuchDatabase(dbName))
   }
   override def databaseExists(dbName: String): Boolean = withHiveState {
@@ -529,7 +528,7 @@ private[hive] class HiveClientImpl(
     specs.zip(newSpecs).foreach { case (oldSpec, newSpec) =>
       val hivePart = getPartitionOption(catalogTable, oldSpec)
         .map { p => toHivePartition(p.copy(spec = newSpec), hiveTable) }
-        .getOrElse { throw new NoSuchPartitionException(db, table, oldSpec) }
+        .getOrElse { throw AnalysisException.noSuchPartition(db, table, oldSpec) }
       client.renamePartition(hiveTable, oldSpec.asJava, hivePart)
     }
   }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
index 7abb9f06b1310..5a72408f6098e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala
@@ -41,7 +41,6 @@ import org.apache.hadoop.hive.serde.serdeConstants
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.FunctionIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
 import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, CatalogTablePartition, CatalogUtils, FunctionResource, FunctionResourceType}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.internal.SQLConf
@@ -422,15 +421,15 @@ private[client] class Shim_v0_12 extends Shim with Logging {
   }
   def dropFunction(hive: Hive, db: String, name: String): Unit = {
-    throw new NoSuchPermanentFunctionException(db, name)
+    throw AnalysisException.noSuchPermanentFunction(db, name)
   }
   def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
-    throw new NoSuchPermanentFunctionException(db, oldName)
+    throw AnalysisException.noSuchPermanentFunction(db, oldName)
   }
   def alterFunction(hive: Hive, db: String, func: CatalogFunction): Unit = {
-    throw new NoSuchPermanentFunctionException(db, func.identifier.funcName)
+    throw AnalysisException.noSuchPermanentFunction(db, func.identifier.funcName)
   }
   def getFunctionOption(hive: Hive, db: String, name: String): Option[CatalogFunction] = {
@@ -529,7 +528,7 @@ private[client] class Shim_v0_13 extends Shim_v0_12 {
   override def renameFunction(hive: Hive, db: String, oldName: String, newName: String): Unit = {
     val catalogFunc = getFunctionOption(hive, db, oldName)
-      .getOrElse(throw new NoSuchPermanentFunctionException(db, oldName))
+      .getOrElse(throw AnalysisException.noSuchPermanentFunction(db, oldName))
       .copy(identifier = FunctionIdentifier(newName, Some(db)))
     val hiveFunc = toHiveFunction(catalogFunc, db)
     hive.alterFunction(db, oldName, hiveFunc)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index d3cbf898e2439..79cf077177b45 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.hive
 import java.io.File
 import org.apache.spark.sql.{AnalysisException, Dataset, QueryTest, SaveMode}
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
 import org.apache.spark.sql.execution.datasources.{CatalogFileIndex, HadoopFsRelation, LogicalRelation}
@@ -103,11 +102,11 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   test("uncache of nonexistant tables") {
     // make sure table doesn't exist
-    intercept[NoSuchTableException](spark.table("nonexistantTable"))
-    intercept[NoSuchTableException] {
+    intercept[AnalysisException](spark.table("nonexistantTable"))
+    intercept[AnalysisException] {
       spark.catalog.uncacheTable("nonexistantTable")
     }
-    intercept[NoSuchTableException] {
+    intercept[AnalysisException] {
       sql("UNCACHE TABLE nonexistantTable")
     }
     sql("UNCACHE TABLE IF EXISTS nonexistantTable")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index 7aff49c0fc3b1..aa745714e14dc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -30,7 +30,6 @@ import org.apache.spark.SparkFunSuite
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPermanentFunctionException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
 import org.apache.spark.sql.catalyst.util.quietly
@@ -171,7 +170,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
   test(s"$version: getDatabase") {
     // No exception should be thrown
     client.getDatabase("default")
-    intercept[NoSuchDatabaseException](client.getDatabase("nonexist"))
+    intercept[AnalysisException](client.getDatabase("nonexist"))
   }
   test(s"$version: databaseExists") {
@@ -438,7 +437,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
   test(s"$version: renameFunction") {
     if (version == "0.12") {
       // Hive 0.12 doesn't allow customized permanent functions
-      intercept[NoSuchPermanentFunctionException] {
+      intercept[AnalysisException] {
         client.renameFunction("default", "func1", "func2")
       }
     } else {
@@ -451,7 +450,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
     val functionClass = "org.apache.spark.MyFunc2"
     if (version == "0.12") {
       // Hive 0.12 doesn't allow customized permanent functions
-      intercept[NoSuchPermanentFunctionException] {
+      intercept[AnalysisException] {
        client.alterFunction("default", function("func2", functionClass))
       }
     } else {
@@ -462,7 +461,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
   test(s"$version: getFunction") {
     if (version == "0.12") {
       // Hive 0.12 doesn't allow customized permanent functions
-      intercept[NoSuchPermanentFunctionException] {
+      intercept[AnalysisException] {
         client.getFunction("default", "func2")
       }
     } else {
@@ -494,7 +493,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
   test(s"$version: dropFunction") {
     if (version == "0.12") {
       // Hive 0.12 doesn't support creating permanent functions
-      intercept[NoSuchPermanentFunctionException] {
+      intercept[AnalysisException] {
         client.dropFunction("default", "func2")
       }
     } else {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 6937e97a47dc6..f6de40fd77936 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -23,7 +23,6 @@ import com.google.common.io.Files
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.test.SQLTestUtils
@@ -127,7 +126,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
   test("show tblproperties for datasource table - errors") {
-    val message1 = intercept[NoSuchTableException] {
+    val message1 = intercept[AnalysisException] {
       sql("SHOW TBLPROPERTIES badtable")
     }.getMessage
     assert(message1.contains("Table or view 'badtable' not found in database 'default'"))
@@ -405,7 +404,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         |USING org.apache.spark.sql.parquet.DefaultSource
       """.stripMargin)
     // An empty sequence of row is returned for session temporary table.
-    intercept[NoSuchTableException] {
+    intercept[AnalysisException] {
       sql("SHOW PARTITIONS parquet_temp")
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3906968aaff10..31d9bae78cd33 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -25,7 +25,6 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
-import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.execution.command.{DDLSuite, DDLUtils}
@@ -1197,10 +1196,10 @@ class HiveDDLSuite
       s"CREATE INDEX $indexName ON TABLE $tabName (a) AS 'COMPACT' WITH DEFERRED REBUILD")
     val indexTabName = spark.sessionState.catalog.listTables("default", s"*$indexName*").head.table
-    intercept[TableAlreadyExistsException] {
+    intercept[AnalysisException] {
       sql(s"CREATE TABLE $indexTabName(b int)")
     }
-    intercept[TableAlreadyExistsException] {
+    intercept[AnalysisException] {
       sql(s"ALTER TABLE $tabName RENAME TO $indexTabName")
     }
@@ -1346,7 +1345,7 @@ class HiveDDLSuite
     assert(spark.table("partTable").count() == data.count())
     // throw exception if no partition is matched for the given non-partial partition spec.
-    intercept[NoSuchPartitionException] {
+    intercept[AnalysisException] {
       sql("TRUNCATE TABLE partTable PARTITION (width=100, length=100)")
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 75f3744ff35be..d18842232a35c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -28,7 +28,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.spark.TestUtils
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry, NoSuchPartitionException}
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
 import org.apache.spark.sql.catalyst.catalog.{CatalogRelation, CatalogTableType, CatalogUtils}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}