
Commit 35b5813

follow comment
1 parent f21cf43 commit 35b5813

4 files changed: 12 additions, 10 deletions


sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala

Lines changed: 1 addition & 1 deletion
@@ -205,7 +205,7 @@ case class FileSourceScanExec(
   private def isDynamicPruningFilter(e: Expression): Boolean =
     e.find(_.isInstanceOf[PlanExpression[_]]).isDefined

-  @transient private lazy val selectedPartitions: Array[PartitionDirectory] = {
+  @transient lazy val selectedPartitions: Array[PartitionDirectory] = {
     val optimizerMetadataTimeNs = relation.location.metadataOpsTimeNs.getOrElse(0L)
     val startTime = System.nanoTime()
     val ret =
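Dropping the private modifier here is what makes the test changes below possible: callers outside FileSourceScanExec can now read the pruned partition list directly off the physical scan node. A minimal sketch of that access pattern, assuming a SparkSession named spark and a partitioned table t (both illustrative, not part of this commit):

import org.apache.spark.sql.execution.FileSourceScanExec

// Build the physical plan for a query over the partitioned table and pull the
// first file-source scan out of it; selectedPartitions is now visible to callers.
val plan = spark.sql("SELECT * FROM t WHERE part = 1").queryExecution.sparkPlan
val prunedPartitionCount: Option[Int] = plan.collectFirst {
  case scan: FileSourceScanExec => scan.selectedPartitions.length
}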

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneFileSourcePartitionsSuite.scala

Lines changed: 4 additions & 4 deletions
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project}
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
-import org.apache.spark.sql.execution.FileSourceScanExec
+import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}
 import org.apache.spark.sql.execution.datasources.{CatalogFileIndex, HadoopFsRelation, LogicalRelation, PruneFileSourcePartitions}
 import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
 import org.apache.spark.sql.execution.joins.BroadcastHashJoinExec
@@ -109,9 +109,9 @@ class PruneFileSourcePartitionsSuite extends PrunePartitionSuiteBase {
     }
   }

-  override def getScanExecPartitionSize(query: String): Long = {
-    sql(query).queryExecution.sparkPlan.collectFirst {
+  override def getScanExecPartitionSize(plan: SparkPlan): Long = {
+    plan.collectFirst {
       case p: FileSourceScanExec => p
-    }.get.relation.location.inputFiles.length
+    }.get.selectedPartitions.length
   }
 }
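The notable part of this hunk is the metric, not just the signature: the old hook counted the files listed by the relation's location, while the new one counts the partition directories the scan keeps after pruning. A small hedged sketch that surfaces both numbers side by side, assuming plan is a physical plan containing a FileSourceScanExec (the helper itself is illustrative, not part of the commit):

import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}

// Returns (selected partition count, listed input file count) for the first
// file-source scan in the plan, if any. A partition holding several data files
// keeps the two numbers apart, which is plausibly why the suite now asserts on the former.
def partitionVsFileCount(plan: SparkPlan): Option[(Int, Int)] =
  plan.collectFirst {
    case scan: FileSourceScanExec =>
      (scan.selectedPartitions.length, scan.relation.location.inputFiles.length)
  }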

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruneHiveTablePartitionsSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -20,6 +20,7 @@ package org.apache.spark.sql.hive.execution
 import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.execution.SparkPlan

 class PruneHiveTablePartitionsSuite extends PrunePartitionSuiteBase {

@@ -54,8 +55,8 @@ class PruneHiveTablePartitionsSuite extends PrunePartitionSuiteBase {
     }
   }

-  override def getScanExecPartitionSize(query: String): Long = {
-    sql(query).queryExecution.sparkPlan.collectFirst {
+  override def getScanExecPartitionSize(plan: SparkPlan): Long = {
+    plan.collectFirst {
       case p: HiveTableScanExec => p
     }.get.prunedPartitions.size
   }

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PrunePartitionSuiteBase.scala

Lines changed: 4 additions & 3 deletions
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.hive.execution

 import org.apache.spark.sql.QueryTest
-import org.apache.spark.sql.hive.HiveUtils
+import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.test.SQLTestUtils
@@ -68,8 +68,9 @@ abstract class PrunePartitionSuiteBase extends QueryTest with SQLTestUtils with
   }

   protected def assertPrunedPartitions(query: String, expected: Long): Unit = {
-    assert(getScanExecPartitionSize(query) == expected)
+    val plan = sql(query).queryExecution.sparkPlan
+    assert(getScanExecPartitionSize(plan) == expected)
   }

-  protected def getScanExecPartitionSize(query: String): Long
+  protected def getScanExecPartitionSize(plan: SparkPlan): Long
 }
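Taken together with the two suites above, the base class now runs the query and builds the physical plan once, and each concrete suite only decides which scan operator to inspect. A condensed sketch of the resulting shape, abridged from the diffs rather than copied verbatim (planFor and the class names are stand-ins):

import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}

// Stand-in for PrunePartitionSuiteBase: turn the query into a plan once,
// then delegate the counting to the suite-specific hook.
abstract class PrunePartitionSketch {
  protected def planFor(query: String): SparkPlan  // e.g. sql(query).queryExecution.sparkPlan in the real suite

  protected def assertPrunedPartitions(query: String, expected: Long): Unit = {
    val plan = planFor(query)
    assert(getScanExecPartitionSize(plan) == expected)
  }

  protected def getScanExecPartitionSize(plan: SparkPlan): Long
}

// Stand-in for PruneFileSourcePartitionsSuite: match the file-source scan and
// count its selected partitions; the Hive suite does the same with HiveTableScanExec.
class FileSourcePruneSketch(build: String => SparkPlan) extends PrunePartitionSketch {
  override protected def planFor(query: String): SparkPlan = build(query)
  override protected def getScanExecPartitionSize(plan: SparkPlan): Long =
    plan.collectFirst { case s: FileSourceScanExec => s }.get.selectedPartitions.length
}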
