@@ -17,42 +17,96 @@
 
 package org.apache.spark.sql.hive
 
+import java.net.URI
+
 import org.apache.hadoop.conf.Configuration
 
-import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry
+import org.apache.spark.sql.catalyst.catalog.CatalogDatabase
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.plans.logical.Range
+import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.util.Utils
 
-class HiveSessionCatalogSuite extends SparkFunSuite {
+class HiveSessionCatalogSuite extends TestHiveSingleton {
 
   test("clone HiveSessionCatalog") {
-    val hiveSession = SparkSession.builder().master("local").enableHiveSupport().getOrCreate()
-    assert(hiveSession.sessionState.catalog.isInstanceOf[HiveSessionCatalog])
-    val original = hiveSession.sessionState.catalog.asInstanceOf[HiveSessionCatalog]
-
-    val tempTable1 = Range(1, 10, 1, 10)
-    original.createTempView("copytest1", tempTable1, overrideIfExists = false)
-
-    // check if tables copied over
-    val clone = original.newSessionCatalogWith(
-      hiveSession,
-      new SQLConf,
-      new Configuration(),
-      new SimpleFunctionRegistry,
-      CatalystSqlParser)
-    assert(original ne clone)
-    assert(clone.getTempView("copytest1") == Option(tempTable1))
-
-    // check if clone and original independent
-    clone.dropTable(TableIdentifier("copytest1"), ignoreIfNotExists = false, purge = false)
-    assert(original.getTempView("copytest1") == Option(tempTable1))
-
-    val tempTable2 = Range(1, 20, 2, 10)
-    original.createTempView("copytest2", tempTable2, overrideIfExists = false)
-    assert(clone.getTempView("copytest2").isEmpty)
+    val original = spark.sessionState.catalog.asInstanceOf[HiveSessionCatalog]
+
+    val tempTableName1 = "copytest1"
+    val tempTableName2 = "copytest2"
+    try {
+      val tempTable1 = Range(1, 10, 1, 10)
+      original.createTempView(tempTableName1, tempTable1, overrideIfExists = false)
+
+      // check if temp views copied over
+      val clone = original.newSessionCatalogWith(
+        spark,
+        new SQLConf,
+        new Configuration(),
+        new SimpleFunctionRegistry,
+        CatalystSqlParser)
+      assert(original ne clone)
+      assert(clone.getTempView(tempTableName1) == Some(tempTable1))
+
+      // check if clone and original independent
+      clone.dropTable(TableIdentifier(tempTableName1), ignoreIfNotExists = false, purge = false)
+      assert(original.getTempView(tempTableName1) == Some(tempTable1))
+
+      val tempTable2 = Range(1, 20, 2, 10)
+      original.createTempView(tempTableName2, tempTable2, overrideIfExists = false)
+      assert(clone.getTempView(tempTableName2).isEmpty)
+    } finally {
+      // Drop the created temp views from the global singleton HiveSession.
+      original.dropTable(TableIdentifier(tempTableName1), ignoreIfNotExists = true, purge = true)
+      original.dropTable(TableIdentifier(tempTableName2), ignoreIfNotExists = true, purge = true)
+    }
+  }
+
+  test("clone SessionCatalog - current db") {
+    val original = spark.sessionState.catalog.asInstanceOf[HiveSessionCatalog]
+    val originalCurrentDatabase = original.getCurrentDatabase
+    val db1 = "db1"
+    val db2 = "db2"
+    val db3 = "db3"
+    try {
+      original.createDatabase(newDb(db1), ignoreIfExists = true)
+      original.createDatabase(newDb(db2), ignoreIfExists = true)
+      original.createDatabase(newDb(db3), ignoreIfExists = true)
+
+      original.setCurrentDatabase(db1)
+
+      // clone the catalog while db1 is the current database
+      val clone = original.newSessionCatalogWith(
+        spark,
+        new SQLConf,
+        new Configuration(),
+        new SimpleFunctionRegistry,
+        CatalystSqlParser)
+
+      // check if current db copied over
+      assert(original ne clone)
+      assert(clone.getCurrentDatabase == db1)
+
+      // check if clone and original independent
+      clone.setCurrentDatabase(db2)
+      assert(original.getCurrentDatabase == db1)
+      original.setCurrentDatabase(db3)
+      assert(clone.getCurrentDatabase == db2)
+    } finally {
+      // Drop the created databases from the global singleton HiveSession.
+      original.dropDatabase(db1, ignoreIfNotExists = true, cascade = true)
+      original.dropDatabase(db2, ignoreIfNotExists = true, cascade = true)
+      original.dropDatabase(db3, ignoreIfNotExists = true, cascade = true)
+      original.setCurrentDatabase(originalCurrentDatabase)
+    }
+  }
+
+  def newUriForDatabase(): URI = new URI(Utils.createTempDir().toURI.toString.stripSuffix("/"))
+
+  def newDb(name: String): CatalogDatabase = {
+    CatalogDatabase(name, name + " description", newUriForDatabase(), Map.empty)
+  }
   }
 }
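
A note on what these assertions mean outside the internal API: `newSessionCatalogWith` copies the session's temporary views and current database into the clone, after which the two catalogs evolve independently (the `Range(start, end, step, numSlices)` plans above are just stand-in view definitions). The closest public analogue is `SparkSession.newSession()`, which isolates the same per-session state but starts the sibling session empty instead of copying it. A minimal sketch of that public-API behavior, assuming a local master and the default in-memory catalog (the names `copytest1` and `db1` are arbitrary):

import org.apache.spark.sql.SparkSession

object SessionIsolationSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("session-isolation-sketch")
      .getOrCreate()

    // Temp views live in the per-session catalog, so a view created here...
    spark.range(1, 10).createOrReplaceTempView("copytest1")

    // ...is invisible to a sibling session: newSession() shares the
    // SparkContext and the external catalog but starts with no temp views,
    // unlike the clone in the suite above, which copies them.
    val sibling = spark.newSession()
    assert(spark.catalog.tableExists("copytest1"))
    assert(!sibling.catalog.tableExists("copytest1"))

    // The current database is per-session as well: setting it in one
    // session leaves the sibling untouched, mirroring the independence
    // assertions in the second test.
    spark.sql("CREATE DATABASE IF NOT EXISTS db1")
    spark.catalog.setCurrentDatabase("db1")
    assert(spark.catalog.currentDatabase == "db1")
    assert(sibling.catalog.currentDatabase == "default")

    // Clean up the shared catalog state before stopping.
    spark.catalog.setCurrentDatabase("default")
    spark.sql("DROP DATABASE db1")
    spark.stop()
  }
}

The try/finally cleanup in the tests exists for the same reason as the DROP DATABASE here: TestHiveSingleton shares one session across suites, so any temp views or databases left behind would leak into other tests.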