Skip to content

Commit 6f94d56

Browse files
committed
[SPARK-10845] [SQL] Makes spark.sql.hive.version a SQLConfEntry
When refactoring SQL options from plain strings to the strongly typed `SQLConfEntry`, `spark.sql.hive.version` wasn't migrated, and doesn't show up in the result of `SET -v`, as `SET -v` only shows public `SQLConfEntry` instances. This affects compatibility with the Simba ODBC driver. This PR migrates this SQL option to a `SQLConfEntry` to fix this issue. Author: Cheng Lian <[email protected]> Closes #8925 from liancheng/spark-10845/hive-version-conf.
1 parent 6fcee90 commit 6f94d56

File tree

3 files changed

+32
-1
lines changed

3 files changed

+32
-1
lines changed

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,6 @@ object HiveThriftServer2 extends Logging {
5555
@DeveloperApi
5656
def startWithContext(sqlContext: HiveContext): Unit = {
5757
val server = new HiveThriftServer2(sqlContext)
58-
sqlContext.setConf("spark.sql.hive.version", HiveContext.hiveExecutionVersion)
5958
server.init(sqlContext.hiveconf)
6059
server.start()
6160
listener = new HiveThriftServer2Listener(server, sqlContext.conf)

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import java.io.File
2121
import java.net.URL
2222
import java.sql.{Date, DriverManager, SQLException, Statement}
2323

24+
import scala.collection.mutable
2425
import scala.collection.mutable.ArrayBuffer
2526
import scala.concurrent.ExecutionContext.Implicits.global
2627
import scala.concurrent.duration._
@@ -431,6 +432,32 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
431432
}
432433
)
433434
}
435+
436+
test("Checks Hive version via SET -v") {
  withJdbcStatement { statement =>
    // `SET -v` lists all public SQLConfEntry instances; the Hive version
    // entry must appear among them (see SPARK-10845).
    val rs = statement.executeQuery("SET -v")

    // Drain the JDBC cursor into key/value pairs before looking anything up.
    val pairs = mutable.ArrayBuffer.empty[(String, String)]
    while (rs.next()) {
      pairs += rs.getString(1) -> rs.getString(2)
    }

    assert(pairs.toMap.get("spark.sql.hive.version") === Some("1.2.1"))
  }
}
448+
449+
test("Checks Hive version via SET") {
  withJdbcStatement { statement =>
    // Plain `SET` must also expose the Hive version entry (SPARK-10845).
    val rs = statement.executeQuery("SET")

    // Drain the JDBC cursor into key/value pairs before looking anything up.
    val pairs = mutable.ArrayBuffer.empty[(String, String)]
    while (rs.next()) {
      pairs += rs.getString(1) -> rs.getString(2)
    }

    assert(pairs.toMap.get("spark.sql.hive.version") === Some("1.2.1"))
  }
}
434461
}
435462

436463
class HiveThriftHttpServerSuite extends HiveThriftJdbcTest {

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -610,6 +610,11 @@ private[hive] object HiveContext {
610610
doc = "Version of the Hive metastore. Available options are " +
611611
s"<code>0.12.0</code> through <code>$hiveExecutionVersion</code>.")
612612

613+
// Exposes the Hive version Spark SQL was built against as a public
// SQLConfEntry so that it shows up in `SET -v` output (SPARK-10845).
// Positional-key call form matches the neighboring stringConf entries.
val HIVE_EXECUTION_VERSION = stringConf("spark.sql.hive.version",
  defaultValue = Some(hiveExecutionVersion),
  doc = "Version of Hive used internally by Spark SQL.")
617+
613618
val HIVE_METASTORE_JARS = stringConf("spark.sql.hive.metastore.jars",
614619
defaultValue = Some("builtin"),
615620
doc = s"""

0 commit comments

Comments
 (0)