Skip to content

Commit e3a467e

Browse files
committed
modify test case
1 parent 35d2b59 commit e3a467e

File tree

1 file changed

+17
-11
lines changed

1 file changed

+17
-11
lines changed

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 17 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -939,15 +939,16 @@ object SPARK_19667_CREATE_TABLE {
939939
def main(args: Array[String]): Unit = {
940940
val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
941941
try {
942-
val warehousePath = s"file:${spark.sharedState.warehousePath.stripSuffix("/")}"
942+
val warehousePath = new Path(spark.sharedState.warehousePath)
943+
val fs = warehousePath.getFileSystem(spark.sessionState.newHadoopConf())
943944
val defaultDB = spark.sessionState.catalog.getDatabaseMetadata("default")
944945
// the default database uses the warehouse path as its location
945-
assert(defaultDB.locationUri.stripSuffix("/") == warehousePath)
946+
assert(new Path(defaultDB.locationUri) == fs.makeQualified(warehousePath))
946947
spark.sql("CREATE TABLE t(a string)")
947948

948949
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
949950
// a table in the default database uses the location of the default database, which is also the warehouse path
950-
assert(table.location.stripSuffix("/") == s"$warehousePath/t")
951+
assert(new Path(table.location) == fs.makeQualified(new Path(warehousePath, "t")))
951952
spark.sql("INSERT INTO TABLE t SELECT 1")
952953
assert(spark.sql("SELECT * FROM t").count == 1)
953954

@@ -956,7 +957,8 @@ object SPARK_19667_CREATE_TABLE {
956957
spark.sql("CREATE TABLE t1(b string)")
957958
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
958959
// a table in a non-default database uses the location of its own database
959-
assert(table1.location.stripSuffix("/") == s"$warehousePath/not_default.db/t1")
960+
assert(new Path(table1.location) == fs.makeQualified(
961+
new Path(warehousePath, "not_default.db/t1")))
960962
} finally {
961963
spark.sql("USE default")
962964
}
@@ -967,45 +969,49 @@ object SPARK_19667_VERIFY_TABLE_PATH {
967969
def main(args: Array[String]): Unit = {
968970
val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
969971
try {
970-
val warehousePath = s"file:${spark.sharedState.warehousePath.stripSuffix("/")}"
972+
val warehousePath = new Path(spark.sharedState.warehousePath)
973+
val fs = warehousePath.getFileSystem(spark.sessionState.newHadoopConf())
971974
val defaultDB = spark.sessionState.catalog.getDatabaseMetadata("default")
972975
// the default database uses the warehouse path as its location
973-
assert(defaultDB.locationUri.stripSuffix("/") == warehousePath)
976+
assert(new Path(defaultDB.locationUri) == fs.makeQualified(warehousePath))
974977

975978
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
976979
// the table in default database created in job(SPARK_19667_CREATE_TABLE) above,
977980
// which has different warehouse path from this job, its location still equals to
978981
// the location when it's created.
979-
assert(table.location.stripSuffix("/") != s"$warehousePath/t")
982+
assert(new Path(table.location) != fs.makeQualified(new Path(warehousePath, "t")))
980983
assert(spark.sql("SELECT * FROM t").count == 1)
981984

982985
spark.sql("CREATE TABLE t3(d string)")
983986
val table3 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t3"))
984987
// the table in the default database created here in this job will use the warehouse path
985988
// of this job as its location
986-
assert(table3.location.stripSuffix("/") == s"$warehousePath/t3")
989+
assert(new Path(table3.location) == fs.makeQualified(new Path(warehousePath, "t3")))
987990

988991
spark.sql("USE not_default")
989992
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
990993
// the table in not default database create in job(SPARK_19667_CREATE_TABLE) above,
991994
// which has different warehouse path from this job, its location still equals to
992995
// the location when it's created.
993-
assert(table1.location.stripSuffix("/") != s"$warehousePath/not_default.db/t1")
996+
assert(new Path(table1.location) != fs.makeQualified(
997+
new Path(warehousePath, "not_default.db/t1")))
994998
assert(!new File(s"$warehousePath/not_default.db/t1").exists())
995999

9961000
spark.sql("CREATE TABLE t2(c string)")
9971001
val table2 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t2"))
9981002
// the table in a non-default database created here in this job will use the location
9991003
// of the database as its location, not the warehouse path in this job
1000-
assert(table2.location.stripSuffix("/") != s"$warehousePath/not_default.db/t2")
1004+
assert(new Path(table2.location) != fs.makeQualified(
1005+
new Path(warehousePath, "not_default.db/t2")))
10011006

10021007
spark.sql("CREATE DATABASE not_default_1")
10031008
spark.sql("USE not_default_1")
10041009
spark.sql("CREATE TABLE t4(e string)")
10051010
val table4 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t4"))
10061011
// a table created in a database that was created in this job will use the location
10071012
// of the database.
1008-
assert(table4.location.stripSuffix("/") == s"$warehousePath/not_default_1.db/t4")
1013+
assert(new Path(table4.location) == fs.makeQualified(
1014+
new Path(warehousePath, "not_default_1.db/t4")))
10091015

10101016
} finally {
10111017
spark.sql("DROP TABLE IF EXISTS t4")

0 commit comments

Comments
 (0)