Skip to content

Commit 2772f0d

Browse files
author
Davies Liu
committed
fix new test failure
1 parent 6d776a9 commit 2772f0d

File tree

2 files changed

+4
-4
lines changed

2 files changed

+4
-4
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -321,9 +321,9 @@ object CatalystTypeConverters {
       row: Row,
       schema: StructType,
       converters: Array[Any => Any]): Row = {
-    val ar = new Array[Any](row.size)
+    val ar = new Array[Any](converters.size)
     var idx = 0
-    while (idx < row.size) {
+    while (idx < converters.size && idx < row.size) {
       ar(idx) = converters(idx)(row(idx))
       idx += 1
     }

sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,8 +63,8 @@ case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
   override def executeTake(limit: Int): Array[Row] = sideEffectResult.take(limit).toArray

   override def execute(): RDD[Row] = {
-    val converted = sideEffectResult.map(r => CatalystTypeConverters.convertToCatalyst(r, schema)
-      .asInstanceOf[Row])
+    val converted = sideEffectResult.map(r =>
+      CatalystTypeConverters.convertToCatalyst(r, schema).asInstanceOf[Row])
     sqlContext.sparkContext.parallelize(converted, 1)
   }
 }

0 commit comments

Comments
 (0)