Skip to content

Commit cad1b63

Browse files
committed
nits - remove remaining traces of HiveSessionState.
1 parent 16f5bea commit cad1b63

File tree

5 files changed

+7
-8
lines changed

5 files changed

+7
-8
lines changed

sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1001,7 +1001,7 @@ object SparkSession {
10011001

10021002
/**
10031003
* Helper method to create an instance of `SessionState` based on `className` from conf.
1004-
* The result is either `SessionState` or `HiveSessionState`.
1004+
* The result is either `SessionState` or a Hive-based `SessionState`.
10051005
*/
10061006
private def instantiateSessionState(
10071007
className: String,

sql/core/src/main/scala/org/apache/spark/sql/internal/SessionState.scala

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ import org.apache.spark.sql.catalyst.parser.ParserInterface
3232
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
3333
import org.apache.spark.sql.execution._
3434
import org.apache.spark.sql.streaming.StreamingQueryManager
35-
import org.apache.spark.sql.util.ExecutionListenerManager
35+
import org.apache.spark.sql.util.{ExecutionListenerManager, QueryExecutionListener}
3636

3737
/**
3838
* A class that holds all session-specific state in a given [[SparkSession]].
@@ -48,8 +48,7 @@ import org.apache.spark.sql.util.ExecutionListenerManager
4848
* @param optimizer Logical query plan optimizer.
4949
* @param planner Planner that converts optimized logical plans to physical plans
5050
* @param streamingQueryManager Interface to start and stop streaming queries.
51-
* @param listenerManager Interface to register custom
52-
* [[org.apache.spark.sql.util.QueryExecutionListener]]s
51+
* @param listenerManager Interface to register custom [[QueryExecutionListener]]s
5352
* @param resourceLoader Session shared resource loader to load JARs, files, etc
5453
* @param createQueryExecution Function used to create QueryExecution objects.
5554
* @param createClone Function used to create clones of the session state.
@@ -147,7 +146,7 @@ class SessionResourceLoader(session: SparkSession) extends FunctionResourceLoade
147146
/**
148147
* Add a jar path to [[SparkContext]] and the classloader.
149148
*
150-
* Note: this method seems not access any session state, but the subclass `HiveSessionState` needs
149+
* Note: this method does not seem to access any session state, but a Hive-based `SessionState` needs
151150
* to add the jar to its hive client for the current session. Hence, it still needs to be in
152151
* [[SessionState]].
153152
*/

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ import org.apache.thrift.transport.TSocket
3838

3939
import org.apache.spark.internal.Logging
4040
import org.apache.spark.sql.AnalysisException
41-
import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
41+
import org.apache.spark.sql.hive.HiveUtils
4242
import org.apache.spark.util.ShutdownHookManager
4343

4444
/**

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ import org.apache.spark.sql.internal.{BaseSessionStateBuilder, SessionResourceLo
3232
*/
3333
private[hive] object HiveSessionState {
3434
/**
35-
* Create a new Hive aware [[SessionState]]. for the given session.
35+
* Create a new Hive aware [[SessionState]] for the given session.
3636
*/
3737
def apply(session: SparkSession): SessionState = {
3838
new HiveSessionStateBuilder(session).build()

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSessionStateSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import org.apache.spark.sql._
2323
import org.apache.spark.sql.hive.test.TestHiveSingleton
2424

2525
/**
26-
* Run all tests from `SessionStateSuite` with a `HiveSessionState`.
26+
* Run all tests from `SessionStateSuite` with a Hive-based `SessionState`.
2727
*/
2828
class HiveSessionStateSuite extends SessionStateSuite
2929
with TestHiveSingleton with BeforeAndAfterEach {

0 commit comments

Comments
 (0)