
Commit 9558823

[SPARK-32590][SQL] Remove fullOutput from RowDataSourceScanExec
1 parent 5b8444a commit 9558823

3 files changed: +3, -13 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala

Lines changed: 1 addition & 5 deletions
@@ -99,17 +99,14 @@ trait DataSourceScanExec extends LeafExecNode {
 
 /** Physical plan node for scanning data from a relation. */
 case class RowDataSourceScanExec(
-    fullOutput: Seq[Attribute],
-    requiredColumnsIndex: Seq[Int],
+    output: Seq[Attribute],
     filters: Set[Filter],
     handledFilters: Set[Filter],
     rdd: RDD[InternalRow],
     @transient relation: BaseRelation,
     tableIdentifier: Option[TableIdentifier])
   extends DataSourceScanExec with InputRDDCodegen {
 
-  def output: Seq[Attribute] = requiredColumnsIndex.map(fullOutput)
-
   override lazy val metrics =
     Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
 
@@ -143,7 +140,6 @@ case class RowDataSourceScanExec(
   // Don't care about `rdd` and `tableIdentifier` when canonicalizing.
   override def doCanonicalize(): SparkPlan =
     copy(
-      fullOutput.map(QueryPlan.normalizeExpressions(_, fullOutput)),
       rdd = null,
       tableIdentifier = None)
 }
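
In short: the scan node used to store the relation's full attribute list plus the positions of the required columns, and derive `output` from them on every access; after this commit it stores the pruned `output` directly, and the explicit normalization of `fullOutput` in `doCanonicalize` goes away with it. A minimal, self-contained sketch of the before/after shape, where `Attribute` is a hypothetical stand-in for Spark's catalyst Attribute, not the real class:

// Hypothetical stand-in for Spark's catalyst Attribute; not the real class.
case class Attribute(name: String)

// Before: `output` was derived on every access from the full attribute list
// plus the indices of the required columns.
case class OldScan(fullOutput: Seq[Attribute], requiredColumnsIndex: Seq[Int]) {
  def output: Seq[Attribute] = requiredColumnsIndex.map(fullOutput)
}

// After: the caller resolves the pruned attributes and passes them directly.
case class NewScan(output: Seq[Attribute])

object BeforeAfter extends App {
  val full   = Seq(Attribute("id"), Attribute("name"), Attribute("age"))
  val before = OldScan(full, Seq(2, 0))
  val after  = NewScan(Seq(Attribute("age"), Attribute("id")))
  assert(before.output == after.output) // both yield Seq(age, id)
}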

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala

Lines changed: 2 additions & 5 deletions
@@ -307,7 +307,6 @@ case class DataSourceStrategy(conf: SQLConf) extends Strategy with Logging with
     case l @ LogicalRelation(baseRelation: TableScan, _, _, _) =>
       RowDataSourceScanExec(
         l.output,
-        l.output.indices,
         Set.empty,
         Set.empty,
         toCatalystRDD(l, baseRelation.buildScan()),
@@ -379,8 +378,7 @@ case class DataSourceStrategy(conf: SQLConf) extends Strategy with Logging with
         .map(relation.attributeMap)
 
     val scan = RowDataSourceScanExec(
-      relation.output,
-      requestedColumns.map(relation.output.indexOf),
+      requestedColumns,
       pushedFilters.toSet,
       handledFilters,
       scanBuilder(requestedColumns, candidatePredicates, pushedFilters),
@@ -401,8 +399,7 @@ case class DataSourceStrategy(conf: SQLConf) extends Strategy with Logging with
       (projectSet ++ filterSet -- handledSet).map(relation.attributeMap).toSeq
 
     val scan = RowDataSourceScanExec(
-      relation.output,
-      requestedColumns.map(relation.output.indexOf),
+      requestedColumns,
       pushedFilters.toSet,
       handledFilters,
       scanBuilder(requestedColumns, candidatePredicates, pushedFilters),
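
Both pruned-scan call sites previously passed `relation.output` together with `requestedColumns.map(relation.output.indexOf)`, which the node then mapped back through `fullOutput`; passing `requestedColumns` directly is behavior-preserving because that round trip is the identity. A small sketch, using the same hypothetical `Attribute` stand-in as above:

object RoundTrip extends App {
  case class Attribute(name: String)

  val relationOutput   = Seq(Attribute("a"), Attribute("b"), Attribute("c"))
  val requestedColumns = Seq(Attribute("c"), Attribute("a"))

  // Old path: attributes -> positions in the full output -> attributes again,
  // mirroring requestedColumns.map(relation.output.indexOf) in the old code.
  val indices   = requestedColumns.map(a => relationOutput.indexOf(a))
  val roundTrip = indices.map(relationOutput)

  assert(roundTrip == requestedColumns) // equal to passing requestedColumns directly
}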

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 0 additions & 3 deletions
@@ -64,11 +64,8 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
       }
       val rdd = v1Relation.buildScan()
       val unsafeRowRDD = DataSourceStrategy.toCatalystRDD(v1Relation, output, rdd)
-      val originalOutputNames = relation.table.schema().map(_.name)
-      val requiredColumnsIndex = output.map(_.name).map(originalOutputNames.indexOf)
       val dsScan = RowDataSourceScanExec(
         output,
-        requiredColumnsIndex,
         translated.toSet,
         pushed.toSet,
         unsafeRowRDD,
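
The DSv2-to-v1 fallback likewise no longer needs to recover column positions by name from the table schema before constructing the scan. A sketch of the deleted lookup, where `schemaNames` and `prunedNames` are hypothetical stand-ins for `relation.table.schema().map(_.name)` and `output.map(_.name)`:

object RemovedLookup extends App {
  // Hypothetical stand-ins for relation.table.schema().map(_.name)
  // and output.map(_.name) in the deleted lines.
  val schemaNames = Seq("id", "name", "age")
  val prunedNames = Seq("age", "id")

  // The computation this commit deletes: map each pruned attribute name back
  // to its position in the full schema.
  val requiredColumnsIndex = prunedNames.map(n => schemaNames.indexOf(n))
  assert(requiredColumnsIndex == Seq(2, 0))
}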
